@@ -18,24 +18,23 @@ import scala.collection.mutable.ArrayBuffer

 */
case class JudicialCaseRelation_CaseAgg(s: SparkSession,
project: String // name of the project the tables live in
- ) extends LoggingUtils with Logging with BaseFunc {
+ ) extends LoggingUtils with Logging with BaseFunc {
@(transient@getter) val spark: SparkSession = s
- private val table_id_map = Map("justicase" -> "case_id")
+ private val table_id_map = Map("wenshu_detail" -> "case_id")
private val pat = ".*\\d+.*".r
import spark.implicits._
- def etl(): Unit = {
- val ds = "20200913"
+ def etl(ds: String): Unit = {
etl_wenshu(ds)
relationByGroup()
}
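The change above removes the hardcoded run date: etl() used to pin ds = "20200913" and now takes it as a parameter. A minimal sketch of a guard a caller might add around the new entrypoint, assuming the yyyyMMdd format implied by the removed sample value (the helper name is hypothetical):

    // Hypothetical wrapper: reject malformed partition dates before running the job.
    // The yyyyMMdd format is an assumption based on the removed value "20200913".
    def etlChecked(jcr: JudicialCaseRelation_CaseAgg, ds: String): Unit = {
      require(ds.matches("""\d{8}"""), s"expected a yyyyMMdd partition, got: $ds")
      jcr.etl(ds)
    }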
private def etl_wenshu(ds: String): Unit = {
- def tableName = "justicase"
+ def tableName = "wenshu_detail"
val table_id = table_id_map(tableName)
- val other_cols = Seq("yg_name", "court_name", "case_no", "bg_name") ++ Seq(table_id,"ds","connect_case_no")
+ val other_cols = Seq("yg_name", "court_name", "case_no", "bg_name") ++ Seq(table_id, "ds", "connect_case_no")
val ods_end_ds = getLastPartitionsOrElse(s"winhc_eci_dev.ods_$tableName", "0")
val tmp_tab = s"all_${tableName}_tmp_$ods_end_ds"
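Note that tableName and the key in table_id_map were renamed together (justicase to wenshu_detail); if they ever drift apart, table_id_map(tableName) throws a bare NoSuchElementException at runtime. A defensive lookup, sketched here only as an option, would fail with a clearer message:

    // Sketch: same lookup, but with a readable failure if the key drifts again.
    val table_id = table_id_map.getOrElse(tableName,
      sys.error(s"no id column registered in table_id_map for table: $tableName"))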
@@ -49,11 +48,12 @@ case class JudicialCaseRelation_CaseAgg(s: SparkSession,
| FROM (
| SELECT ${other_cols.mkString(",")}
| FROM winhc_eci_dev.ods_$tableName
- | WHERE ds = '$ods_end_ds'
- | UNION ALL
- | SELECT ${other_cols.mkString(",")}
- | FROM winhc_eci_dev.inc_ods_$tableName
- | WHERE ds > $ods_end_ds
+ | WHERE ds > 0
+ | --- WHERE ds = '$ods_end_ds'
+ | --- UNION ALL
+ | --- SELECT ${other_cols.mkString(",")}
+ | --- FROM winhc_eci_dev.inc_ods_$tableName
+ | --- WHERE ds > $ods_end_ds
| ) AS t1
| ) AS t2
|WHERE t2.num = 1
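Only the tail of the query is visible in this hunk; the t2.num = 1 filter implies a window-function dedup computed in the enclosing SELECT. The usual shape of that pattern, sketched in Spark SQL with a placeholder source and partition key (both are assumptions, neither is confirmed by the hunk):

    // Assumed shape of the enclosing query: keep one row per case_no,
    // preferring the newest partition. some_source and the keys are placeholders.
    sql(
      """
        |SELECT *
        |FROM (
        |    SELECT t1.*
        |         , ROW_NUMBER() OVER (PARTITION BY case_no ORDER BY ds DESC) AS num
        |    FROM some_source AS t1
        |) AS t2
        |WHERE t2.num = 1
        |""".stripMargin)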
@@ -163,7 +163,7 @@ case class JudicialCaseRelation_CaseAgg(s: SparkSession,

sql(
s"""
- |INSERT ${if (isWindows) "INTO" else "OVERWRITE"} TABLE winhc_eci_dev.xjk_ads_judicial_case_relation1
+ |INSERT ${if (isWindows) "INTO" else "OVERWRITE"} TABLE winhc_eci_dev.ads_judicial_case_relation
|SELECT id_1
| ,id_2
| ,case_no_1
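This hunk retargets the output from a personal scratch table (xjk_ads_judicial_case_relation1) to the production table, while the isWindows conditional keeps local development runs additive (INTO) and cluster runs overwriting (OVERWRITE). Factored out, the toggle might read as below; how isWindows is actually defined in the mixed-in traits is not shown in the diff, so the detection here is an assumption:

    // Sketch: one place to decide INTO vs OVERWRITE instead of inlining
    // the conditional into every INSERT statement.
    val isWindows: Boolean =
      System.getProperty("os.name").toLowerCase.contains("windows")

    def insertMode: String = if (isWindows) "INTO" else "OVERWRITE"

    // usage: sql(s"INSERT $insertMode TABLE winhc_eci_dev.ads_judicial_case_relation ...")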
@@ -238,14 +238,20 @@ case class JudicialCaseRelation_CaseAgg(s: SparkSession,
object JudicialCaseRelation_CaseAgg {

def main(args: Array[String]): Unit = {
+
+ val Array(ds) = args
+
+ println(
+ s"""
+ |ds: $ds
+ |""".stripMargin)
val config = mutable.Map(
"spark.hadoop.odps.project.name" -> "winhc_eci_dev",
"spark.hadoop.odps.spark.local.partition.amt" -> "1000"
)
val spark: SparkSession = SparkUtils.InitEnv(this.getClass.getSimpleName, config)
val jcr = JudicialCaseRelation_CaseAgg(spark, project = "winhc_eci_dev")
- jcr.etl()
-// jcr.relationByGroup()
+ jcr.etl(ds)
spark.stop()
}
}
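With main now destructuring args, the run date is supplied at submit time instead of being edited into the source. The new contract in miniature, using the old hardcoded date as the example value (the value comes from the removed val ds line; note that val Array(ds) = args throws a MatchError if no argument is passed):

    // Usage example: args(0) is the partition date that main unpacks via Array(ds).
    JudicialCaseRelation_CaseAgg.main(Array("20200913"))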