package com.winhc.bigdata.spark.jobs

import com.winhc.bigdata.spark.utils.{CompanySummaryPro, SparkUtils}

import scala.collection.mutable

/**
 * Batch job that (re)builds the company summary for the
 * `bankruptcy_open_case` dimension table in the `winhc_eci` ODPS project.
 *
 * Runs a FULL (non-incremental) calculation: `calc(is_inc = false)`.
 *
 * @author: XuJiakai
 * @date: 2020/10/27 15:04
 */
object inc_ads_bankruptcy_open_case_summary {

  def main(args: Array[String]): Unit = {
    // Spark / ODPS runtime settings for this job. InitEnv expects a
    // mutable.Map, so keep the collection type as-is.
    val config = mutable.Map(
      "spark.hadoop.odps.project.name" -> "winhc_eci",
      "spark.debug.maxToStringFields" -> "200",
      "spark.hadoop.odps.spark.local.partition.amt" -> "10000"
    )

    val spark = SparkUtils.InitEnv(getClass.getSimpleName, config)

    // cidField extracts the company id as the first '_'-separated
    // segment of the HBase-style rowkey.
    CompanySummaryPro(s = spark
      , project = "winhc_eci"
      , tableName = "bankruptcy_open_case"
      , cidField = "split(rowkey,'_')[0]"
    ).calc(is_inc = false)

    spark.stop()
  }

}
|