@@ -0,0 +1,165 @@
+package com.winhc.bigdata.spark.jobs.dynamic
+
+import java.util.Date
+
+import com.winhc.bigdata.spark.config.EsConfig
+import com.winhc.bigdata.spark.utils.ReflectUtils.getClazz
+import com.winhc.bigdata.spark.utils.{LoggingUtils, SparkUtils}
+import org.apache.commons.lang3.time.DateFormatUtils
+import org.apache.spark.internal.Logging
+import org.apache.spark.sql.types.StringType
+import org.apache.spark.sql.{Row, SparkSession}
+
+import scala.annotation.meta.getter
+import scala.collection.immutable.ListMap
+import scala.collection.mutable
+
+/**
+ * @Author: XuJiakai
+ * @Date: 2020/7/27 16:52
+ * @Description: Company dynamics
+ */
+object CompanyDynamic {
+
+  case class CompanyDynamicUtil(s: SparkSession,
+                                project: String, // project the target table lives in
+                                ds: String // target partition (date)
+                               ) extends LoggingUtils with Logging {
+    @(transient @getter) val spark: SparkSession = s
+
+    private val env = "dev"
+    var cleanFlag = false
+    val targetTab = "xjk_tmp_company_dynamic"
+
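+    // Create the output table (partitioned by ds) if it does not already exist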
+    def init(): Unit = {
+      sql(
+        s"""
+           |CREATE TABLE IF NOT EXISTS ${getEnvProjectName(env, project)}.$targetTab
+           |(
+           |    cid STRING COMMENT 'company id'
+           |    ,info_type STRING COMMENT 'change category (major class)'
+           |    ,rta_desc STRING COMMENT 'change description, change title'
+           |    ,change_content STRING COMMENT 'change content'
+           |    ,change_time STRING COMMENT 'change time'
+           |    ,biz_id STRING COMMENT 'business id, source data row id'
+           |    ,sub_info_type STRING COMMENT 'change subcategory, table name'
+           |    ,info_risk_level STRING COMMENT 'change risk level'
+           |    ,winhc_suggest STRING COMMENT 'suggestion message'
+           |    ,create_time STRING COMMENT 'creation time'
+           |)
+           |COMMENT 'company dynamic output table'
+           |PARTITIONED BY (ds STRING COMMENT 'partition')
+           |LIFECYCLE 30
+           |""".stripMargin)
+    }
+
+    // tableName: table name, without prefix/suffix
+    def calc(tableName: String): Unit = {
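+      // Load the per-table handler via reflection; every supported table is expected
+      // to provide a CompanyDynamicHandle implementation under the .tables package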
+      val handle = getClazz[CompanyDynamicHandle](s"com.winhc.bigdata.spark.jobs.dynamic.tables.$tableName")
+
+      val types = handle.org_type()
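+      // Pull this partition's change-extract rows for the table and map each row through the handler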
+      val rdd = sql(
+        s"""
+           |SELECT *
+           |FROM winhc_eci_dev.ads_change_extract
+           |WHERE ds = '$ds'
+           |AND tn = '$tableName'
+           |AND type IN (${types.map("'" + _ + "'").mkString(",")})
+           |""".stripMargin)
+        .rdd.map(r => {
+        val rowkey = r.getAs[String]("rowkey")
+        val cid = r.getAs[String]("cid")
+        val new_data = r.getAs[Map[String, String]]("data")
+        val old_data = r.getAs[Map[String, String]]("old_data")
+        val biz_date = r.getAs[String]("biz_date")
+        val fields = r.getAs[String]("fields")
+        val res = handle.handle(rowkey, biz_date, cid, if (fields == null) null else fields.split(","), old_data, new_data)
+        Row(cid, res._1, res._2, res._3, res._4, res._5, res._6, res._7, res._8, DateFormatUtils.format(new Date(), "yyyy-MM-dd HH:mm:ss"))
+      })
+
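+      // Build the DataFrame schema to match the target table's column order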
+      val schema = getSchema(ListMap(
+        "cid" -> StringType
+        , "info_type" -> StringType
+        , "rta_desc" -> StringType
+        , "change_content" -> StringType
+        , "change_time" -> StringType
+        , "biz_id" -> StringType
+        , "sub_info_type" -> StringType
+        , "info_risk_level" -> StringType
+        , "winhc_suggest" -> StringType
+        , "create_time" -> StringType
+      ))
+      spark.createDataFrame(rdd, schema)
+        .createOrReplaceTempView("company_dynamic_tmp")
+
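+      // Drop the target partition once per run so repeated calc() calls append without duplicating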
+      if (!cleanFlag) {
+        sql(
+          s"""
+             |ALTER TABLE ${getEnvProjectName(env, project)}.$targetTab DROP IF EXISTS PARTITION(ds='$ds')
+             |""".stripMargin)
+        cleanFlag = true
+      }
+
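+      // Read the target table's columns (minus the partition key) so the INSERT matches its column order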
+      val cols = getColumns(s"${getEnvProjectName(env, project)}.$targetTab").filter(!_.equals("ds"))
+
+      sql(
+        s"""
+           |INSERT INTO TABLE ${getEnvProjectName(env, project)}.$targetTab PARTITION(ds='$ds')
+           |SELECT ${cols.mkString(",")}
+           |FROM
+           |    company_dynamic_tmp
+           |""".stripMargin)
+    }
+  }
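+
+  // For reference only: a minimal sketch (hypothetical) of the handler contract that
+  // calc() resolves via reflection. Member shapes are inferred from the call sites
+  // above; the real CompanyDynamicHandle trait may differ.
+  //
+  //   class company_tm extends CompanyDynamicHandle {
+  //     // change-extract `type` values this handler consumes
+  //     override def org_type(): Seq[String] = Seq("insert", "update")
+  //
+  //     // returns (info_type, rta_desc, change_content, change_time, biz_id,
+  //     //          sub_info_type, info_risk_level, winhc_suggest)
+  //     override def handle(rowkey: String, bizDate: String, cid: String,
+  //                         changeFields: Array[String],
+  //                         oldData: Map[String, String],
+  //                         newData: Map[String, String]
+  //                        ): (String, String, String, String, String, String, String, String) = ???
+  //   }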
+
+  def main(args: Array[String]): Unit = {
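+    // args: project, tableName(s, comma-separated), ds (target partition)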
+    val Array(project, tableName, ds) = args
+
+    println(
+      s"""
+         |project: $project
+         |tableNames: $tableName
+         |ds: $ds
+         |""".stripMargin)
+
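+    // Merge the ES connection settings with the ODPS job settings for this run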
+    val config = EsConfig.getEsConfigMap ++ mutable.Map(
+      "spark.hadoop.odps.project.name" -> project,
+      "spark.hadoop.odps.spark.local.partition.amt" -> "10"
+    )
+    val spark = SparkUtils.InitEnv("CompanyDynamic", config)
+    val cd = CompanyDynamicUtil(spark, project, ds)
+
+    cd.init()
+
+    for (e <- tableName.split(",")) {
+      cd.calc(e)
+    }
+    spark.stop()
+  }
+}