Browse Source

财产线索全量

lyb 3 years ago
parent
commit
0276baf5fc
48 changed files with 2141 additions and 2 deletions
  1. 11 2
      src/main/scala/com/winhc/bigdata/spark/jobs/judicial/JudicialCaseRelationPre12.scala
  2. 373 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/ChangeExtractAll.scala
  3. 79 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/CompanyChangeHandle1.scala
  4. 25 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/auction_tracking_list.scala
  5. 32 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/bankruptcy_open_case.scala
  6. 30 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_bid_list.scala
  7. 25 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_copyright_reg_list.scala
  8. 25 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_copyright_works_list.scala
  9. 30 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_court_open_announcement_list.scala
  10. 20 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_finance.scala
  11. 20 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_holder.scala
  12. 20 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_holder_v2.scala
  13. 30 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_land_announcement.scala
  14. 31 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_land_mortgage.scala
  15. 42 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_mortgage_info.scala
  16. 25 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_patent_list.scala
  17. 27 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_tm.scala
  18. 21 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_zxr_list.scala
  19. 30 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_zxr_restrict.scala
  20. 23 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/increase_registered_capital_info.scala
  21. 28 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/wenshu_detail_combine.scala
  22. 24 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/zxr_evaluate.scala
  23. 24 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/zxr_evaluate_results.scala
  24. 320 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/CompanyMonitor.scala
  25. 37 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/auction_tracking_list.scala
  26. 23 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/bankruptcy_open_case.scala
  27. 77 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company.scala
  28. 21 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_bid_list.scala
  29. 24 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_copyright_reg_list.scala
  30. 24 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_copyright_works_list.scala
  31. 29 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_court_open_announcement_list.scala
  32. 24 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_dishonest_info.scala
  33. 34 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_equity_info_list.scala
  34. 23 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_finance.scala
  35. 76 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_holder_v2.scala
  36. 31 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_land_announcement.scala
  37. 35 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_land_mortgage.scala
  38. 35 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_land_mortgage_v2.scala
  39. 31 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_mortgage_info.scala
  40. 25 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_patent_list.scala
  41. 25 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_tm.scala
  42. 36 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_zxr_list.scala
  43. 27 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_zxr_restrict.scala
  44. 32 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/increase_registered_capital_info.scala
  45. 47 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/wenshu_detail_combine.scala
  46. 51 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/wenshu_detail_combine_v2.scala
  47. 24 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/zxr_evaluate.scala
  48. 35 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/zxr_evaluate_results.scala

+ 11 - 2
src/main/scala/com/winhc/bigdata/spark/jobs/judicial/JudicialCaseRelationPre12.scala

@@ -74,8 +74,17 @@ case class JudicialCaseRelationPre12(s: SparkSession, project: String
          |         ,case_create_time as date
          |         ,rowkey as detail_id
          |         ,exec_amount as case_amt
-         |      from $project.inc_ads_company_zxr_final_case
-         |      where length(case_no) > 0 and ds > '0'
+         |      from
+         |      (
+         |        select case_no,name,court_name,case_create_time,rowkey,update_time,exec_amount
+         |        from $project.ads_company_zxr_final_case
+         |        where length(case_no) > 0 and ds > '0'
+         |        union all
+         |        select case_no,name,court_name,case_create_time,rowkey,update_time,exec_amount
+         |        from $project.inc_ads_company_zxr_final_case
+         |        where length(case_no) > 0 and ds > '0'
+         |      )
+         |
          |      union all
          |      select
          |         md5(cleanup(case_no)) as judicase_id

+ 373 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/ChangeExtractAll.scala

@@ -0,0 +1,373 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change
+
+import com.winhc.bigdata.spark.config.EsConfig
+import com.winhc.bigdata.spark.utils.BaseUtil.isWindows
+import com.winhc.bigdata.spark.utils._
+import org.apache.spark.internal.Logging
+import org.apache.spark.sql.functions.col
+import org.apache.spark.sql.types.{MapType, StringType, StructField, StructType}
+import org.apache.spark.sql.{DataFrame, Row, SparkSession}
+
+import scala.annotation.meta.getter
+import scala.collection.mutable
+
+/**
+ * @Author: XuJiakai
+ * @Date: 2020/7/7 11:25
+ * @Description: 筛选出数据的具体变更
+ */
+object ChangeExtractAll {
+
+  //按change_flag把同一rowkey的数据map拆分为新旧两份返回:flag="0"为新数据,flag="1"为旧数据,缺失一侧时返回null
+  def getDoubleDataMap(iterable: Iterable[Map[String, String]]): (Map[String, String], Map[String, String]) = {
+    val map = iterable.map(m => (m("change_flag"), m)).toMap
+    (map.getOrElse("0", null), map.getOrElse("1", null))
+  }
+
+  def getHandleClazz(tableName: String, equCols: Seq[String]): {def handle(rowkey: String, oldMap: Map[String, String], newMap: Map[String, String]): (String, String, String, Map[String, String], String, String, String, String, Map[String, String])} = {
+    val clazz = s"com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables.$tableName"
+    val foo = Class.forName(clazz)
+      .getConstructors.head.newInstance(equCols)
+      .asInstanceOf[ {
+      def handle(rowkey: String, oldMap: Map[String, String], newMap: Map[String, String]): (String, String, String, Map[String, String], String, String, String, String, Map[String, String])
+    }]
+    foo
+  }
+
+
+  case class ChangeExtractHandle(s: SparkSession,
+                                 project: String, //表所在工程名
+                                 tableName1: String, //表名(不加前后辍)
+                                 primaryKey: String, //此维度主键
+                                 inc_ds: String, //需要计算的分区
+                                 primaryFields: Seq[String] //主要字段,该字段任意一个不同 则认为发生变化
+                                ) extends LoggingUtils with Logging {
+    @(transient@getter) val spark: SparkSession = s
+
+
+    val target_eci_change_extract = "ads_change_extract_all"
+
+    val updateTimeMapping = Map(
+      "wenshu_detail_combine" -> "update_date", //文书排序时间
+      "company_equity_info_list" -> "reg_date" //股权出质按登记日期排序
+    )
+    //不同name映射table
+    val tabMapping =
+      Map("company_holder_v2" -> "company_holder"//股东(v2维度复用company_holder表数据)
+      )
+
+    //转换字段
+    def trans(s: String): String = {
+      var res = s
+      if (tabMapping.contains(s)) {
+        res = tabMapping(s)
+      }
+      res
+    }
+
+    def calc(isCopy: Boolean = true): Unit = {
+      val tableName = trans(tableName1)
+      val cols = primaryFields.filter(!_.equals(primaryKey)).seq
+
+      val ds = inc_ds.replace("-", "")
+
+      val intersectCols = getColumns(s"$project.ads_$tableName").toSet & getColumns(s"$project.inc_ads_$tableName").toSet
+
+      val otherAllCols = intersectCols.filter(!primaryKey.equals(_)).toSeq
+      val all_cols = primaryKey +: otherAllCols :+ "change_flag"
+
+      val lastDs_ads_all = getLastPartitionsOrElse(s"$project.ads_$tableName", "0")
+
+      val handle = ReflectUtils.getClazz[CompanyChangeHandle1](s"com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables.$tableName1", cols)
+      //      val handle = getHandleClazz(tableName, cols)
+
+      var condition = handle.getCondition()
+
+      val update_time = BaseUtil.nowDate()
+
+      var df: DataFrame = null
+      isCopy match {
+        case true => {
+          sql(
+            s"""
+               |SELECT  cid,current_cid as new_cid
+               |FROM    ${project}.inc_ads_company
+               |WHERE   ds = '${getLastPartitionsOrElse(s"$project.inc_ads_company", "0")}'
+               |AND     cid IS NOT NULL
+               |AND     current_cid IS NOT NULL
+               |GROUP BY cid,current_cid
+               |""".stripMargin).createOrReplaceTempView("mapping")
+
+          val cid = getColumns(s"$project.ads_$tableName").filter(f => f.equals("cid") || f.equals("new_cid")).max
+
+          primaryKey.equals("rowkey") match {
+            case true => {
+              df = sql(
+                s"""
+                   |SELECT  t2.$primaryKey,${otherAllCols.map("t2." + _).mkString(",")},'0' as change_flag
+                   |FROM    (
+                   |
+                   |             SELECT  concat_ws('_',coalesce(mm.new_cid,tmp.$cid),split(rowkey, '_')[1]) AS rowkey
+                   |                     ,${intersectCols.diff(Set("rowkey", "cid", "new_cid")).mkString(",")}
+                   |                     ,coalesce(mm.new_cid,tmp.$cid) AS new_cid
+                   |                     ,tmp.$cid as cid
+                   |                     ,c
+                   |             FROM    (
+                   |                         SELECT  a.*
+                   |                                 ,row_number() OVER (PARTITION BY a.${primaryKey} ORDER BY ${updateTimeMapping.getOrElse(tableName, "update_time")} DESC) c
+                   |                         FROM    (
+                   |                                     SELECT  ${intersectCols.mkString(",")}
+                   |                                     FROM    $project.ads_$tableName
+                   |                                     WHERE   ds = $lastDs_ads_all  ${condition}
+                   |                                     UNION ALL
+                   |                                     SELECT  ${intersectCols.mkString(",")}
+                   |                                     FROM    $project.inc_ads_$tableName
+                   |                                     WHERE   ds > $lastDs_ads_all ${condition}
+                   |                                 ) AS a
+                   |                     ) AS tmp
+                   |             LEFT JOIN mapping mm
+                   |             ON tmp.$cid = mm.cid
+                   |             WHERE   tmp.c = 1
+                   |        ) AS t2
+                   |""".stripMargin)
+            }
+            case false => {
+              df = sql(
+                s"""
+                   |SELECT  t2.$primaryKey,${otherAllCols.map("t2." + _).mkString(",")},'0' as change_flag
+                   |FROM    (
+                   |
+                   |             SELECT  ${intersectCols.diff(Set("rowkey", cid)).mkString(",")}
+                   |                     ,coalesce(mm.new_cid,tmp.$cid) AS $cid
+                   |             FROM    (
+                   |                         SELECT  a.*
+                   |                                 ,row_number() OVER (PARTITION BY a.${primaryKey} ORDER BY ${updateTimeMapping.getOrElse(tableName, "update_time")} DESC) c
+                   |                         FROM    (
+                   |                                     SELECT  ${intersectCols.mkString(",")}
+                   |                                     FROM    $project.ads_$tableName
+                   |                                     WHERE   ds = $lastDs_ads_all ${condition}
+                   |                                     UNION ALL
+                   |                                     SELECT  ${intersectCols.mkString(",")}
+                   |                                     FROM    $project.inc_ads_$tableName
+                   |                                     WHERE   ds > $lastDs_ads_all ${condition}
+                   |                                 ) AS a
+                   |                     ) AS tmp
+                   |             LEFT JOIN mapping mm
+                   |             ON tmp.$cid = mm.cid
+                   |             WHERE   tmp.c = 1
+                   |        ) AS t2
+                   |""".stripMargin)
+            }
+          }
+
+
+        }
+        case false => {
+          df = sql(
+            s"""
+
+               |SELECT  t2.$primaryKey,${otherAllCols.map("t2." + _).mkString(",")},'0' as change_flag
+               |FROM
+               |   (
+               |             SELECT  tmp.*
+               |             FROM    (
+               |                         SELECT  a.*
+               |                                 ,row_number() OVER (PARTITION BY a.${primaryKey} ORDER BY ${updateTimeMapping.getOrElse(tableName, "update_time")} DESC) c
+               |                         FROM    (
+               |                                     SELECT  ${intersectCols.mkString(",")}
+               |                                     FROM    $project.ads_$tableName
+               |                                     WHERE   ds = $lastDs_ads_all ${condition}
+               |                                     UNION ALL
+               |                                     SELECT  ${intersectCols.mkString(",")}
+               |                                     FROM    $project.inc_ads_$tableName
+               |                                     WHERE   ds > $lastDs_ads_all ${condition}
+               |                                 ) AS a
+               |                     ) AS tmp
+               |             WHERE   tmp.c = 1
+               |        ) AS t2
+               |""".stripMargin)
+        }
+      }
+
+
+      val rdd =
+        df.select(all_cols.map(column => col(column).cast("string")): _*)
+          .rdd.map(r => {
+          (r.getAs[String](primaryKey), all_cols.map(f => (f, r.getAs[String](f))).toMap)
+        }).groupByKey()
+          .map(x => {
+            val rowkey = x._1
+            val map_list = x._2
+            //          try {
+            //            if (map_list.size == 1) {
+            //              val res = handle.handle(rowkey, null, map_list.head)
+            //              Row(res._1, res._2, tableName, res._3, res._4, res._5, res._6, res._7, res._8, update_time, res._9)
+            //            } else {
+            //              if (map_list.size > 2) {
+            //                logInfo("list.size > 2! rowkey:" + rowkey)
+            //              }
+            val m = getDoubleDataMap(map_list)
+
+            val new_map = m._1
+            val old_map = m._2
+            if (new_map == null && old_map == null) {
+              null
+            } else if (old_map == null) {
+              val res = handle.handle(rowkey, null, map_list.head)
+              if (res == null) {
+                null
+              } else {
+                Row(res._1, res._2, tableName, res._3, res._4, res._5, res._6, res._7, res._8, update_time, res._9)
+              }
+            } else if (new_map == null) {
+              null
+            } else {
+              val res = handle.handle(rowkey, old_map, new_map)
+              if (res == null) {
+                null
+              } else {
+                Row(res._1, res._2, tableName, res._3, res._4, res._5, res._6, res._7, res._8, update_time, res._9)
+              }
+            }
+            //            }
+            /* } catch {
+               case e: Exception => {
+                 logError(s"xjk rowkey:$rowkey msg:${e.getMessage} equCols:$cols")
+                 logError(e.getMessage, e)
+                 println(s"xjk rowkey:$rowkey msg:${e.getMessage} equCols:$cols")
+               }
+                 null
+             }*/
+          }).filter(_ != null)
+
+      val schema = StructType(Array(
+        StructField("rowkey", StringType), //表数据主建
+        StructField("cid", StringType), //公司id
+        StructField("table_name", StringType), //表名
+        StructField("type", StringType), // 变更类型 insert update
+        StructField("data", MapType(StringType, StringType)), //变更后数据
+        StructField("fields", StringType), //如果是更新 则显示更新字段
+        StructField("title", StringType), // 动态数据展示 ps. 新增某土地公示
+        StructField("label", StringType), // 1.一般变更,2.风险变更
+        StructField("biz_time", StringType), //业务时间
+        StructField("update_time", StringType), //处理时间
+        StructField("old_data", MapType(StringType, StringType)) //变更前数据
+      ))
+
+      spark.createDataFrame(rdd, schema)
+        .createOrReplaceTempView(s"tmp_change_all_view$tableName1") //
+
+      sql(
+        s"""
+           |INSERT ${if (isWindows) "INTO" else "OVERWRITE"} TABLE ${project}.$target_eci_change_extract PARTITION(ds='$ds',tn='$tableName1')
+           |SELECT *
+           |FROM
+           |    tmp_change_all_view$tableName1
+           |""".stripMargin)
+    }
+  }
+
+
+  // winhc_eci_dev company_tm rowkey 20200717 status_new
+  // winhc_eci_dev company_patent_list rowkey 20200717 lprs
+  // winhc_eci_dev company_certificate rowkey 20200707 type
+  // winhc_eci_dev company_copyright_works_list rowkey 20200717 type
+  // winhc_eci_dev company_copyright_reg_list rowkey 20200717 version
+  // winhc_eci_dev company_employment rowkey 20200630 source
+
+  // winhc_eci_dev company_land_publicity rowkey 20200717 title,location,use_for
+  // winhc_eci_dev company_land_announcement rowkey 20200717 e_number,project_name
+
+  // winhc_eci_dev company_bid_list rowkey 20200717 title
+  // winhc_eci_dev company_land_transfer rowkey 20200717 num,location
+  // winhc_eci_dev company_abnormal_info rowkey 20200717 remove_reason
+
+  // winhc_eci_dev company_own_tax rowkey 20200729 tax_balance,tax_category,tax_num
+
+
+  //winhc_eci_dev company_equity_info id 20200730 reg_number false
+
+
+  // winhc_eci_dev company cid 20200630 legal_entity_id,reg_location,business_scope,reg_status,reg_capital,emails,phones
+
+
+  private val startArgs = Seq(
+//    Args(tableName = "company_tm", primaryFields = "status_new")
+//    , Args(tableName = "company_patent_list", primaryFields = "lprs")
+     Args(tableName = "company_land_announcement", primaryFields = "e_number,project_name")
+    , Args(tableName = "company_bid_list", primaryFields = "title")
+    , Args(tableName = "company_zxr_list", primaryFields = "status")
+//    , Args(tableName = "company_copyright_works_list", primaryFields = "type")
+//    , Args(tableName = "company_copyright_reg_list", primaryFields = "version")
+    , Args(tableName = "company_land_mortgage", primaryFields = "land_num,source_url")
+    , Args(tableName = "bankruptcy_open_case", primaryFields = "case_no", isCopy = false) //破产重整
+    , Args(tableName = "company_mortgage_info", primaryFields = "reg_num") //动产抵押
+    , Args(tableName = "company_court_open_announcement_list", primaryFields = "case_reason") //开庭公告
+    , Args(tableName = "company_zxr_restrict", primaryFields = "status") //限制消费令,发现最新状态
+
+
+    , Args(tableName = "wenshu_detail_combine", primaryFields = "cname") //文书
+
+
+
+    , Args(tableName = "company_equity_info_list", primaryFields = "reg_number")
+    //公司名称,法人ID:人标识或公司标识,公司类型,注册地址,营业期限终止日期,经营范围,登记机关,企业状态                 ,注册资本,注销日期,注销原因
+    , Args(tableName = "company_finance", primaryFields = "round")
+    , Args(tableName = "company_dishonest_info", primaryFields = "status")
+//    , Args(tableName = "company_holder", primaryFields = "amount")
+//    , Args(tableName = "company_holder_v2", primaryFields = "deleted")
+    , Args(tableName = "increase_registered_capital_info", primaryFields = "change_time")
+    , Args(tableName = "auction_tracking_list", primaryFields = "auction_items_id")
+
+
+    , Args(tableName = "zxr_evaluate", primaryFields = "name,case_no,asset_name")
+    , Args(tableName = "zxr_evaluate_results", primaryFields = "name,case_no,asset_name")
+
+  )
+
+
+  private case class Args(project: String = "winhc_eci_dev"
+                          , tableName: String
+                          , primaryKey: String = "rowkey"
+                          , primaryFields: String
+                          , isCopy: Boolean = true)
+
+
+  def main(args: Array[String]): Unit = {
+    val Array(tableName, inc_ds) = args
+
+    val config = EsConfig.getEsConfigMap ++ mutable.Map(
+      "spark.hadoop.odps.project.name" -> "winhc_eci_dev",
+      "spark.hadoop.odps.spark.local.partition.amt" -> "100"
+    )
+    val spark = SparkUtils.InitEnv("MonitorChangeAll", config)
+
+
+    var start = startArgs
+    if (!tableName.equals("all")) {
+      val set = tableName.split(",").toSet
+      start = start.filter(a => set.contains(a.tableName))
+    }
+
+    val a = start.map(e => (e.tableName, () => {
+      ChangeExtractHandle(spark, e.project, e.tableName, e.primaryKey, inc_ds, e.primaryFields.split(",")).calc(e.isCopy)
+      true
+    }))
+
+    AsyncExtract.startAndWait(spark, a)
+
+    /* if (tableName.equals("all")) {
+       startArgs.foreach(e => {
+         ChangeExtractHandle(spark, e.project, e.tableName, e.primaryKey, inc_ds, e.primaryFields.split(",")).calc(e.isCopy)
+       })
+     } else {
+       val set = tableName.split(",").toSet
+       startArgs.filter(a => set.contains(a.tableName)).foreach(e => {
+         ChangeExtractHandle(spark, e.project, e.tableName, e.primaryKey, inc_ds, e.primaryFields.split(",")).calc(e.isCopy)
+       })
+     }*/
+
+    spark.stop()
+  }
+
+}

+ 79 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/CompanyChangeHandle1.scala

@@ -0,0 +1,79 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change
+
+import com.winhc.bigdata.spark.utils.BaseUtil.cleanup
+import org.apache.commons.lang3.StringUtils
+import org.apache.spark.internal.Logging
+
+import scala.annotation.meta.{getter, setter}
+
+/**
+ * @Author: XuJiakai
+ * @Date: 2020/7/9 16:44
+ * @Description:
+ */
+
+trait CompanyChangeHandle1 extends Serializable with Logging {
+  @getter
+  @setter
+  protected val equCols: Seq[String]
+
+  /**
+   *
+   * @param rowkey
+   * @param oldMap
+   * @param newMap
+   * @return rowkey,cid,类型【insert or update】,新数据,更新字段,更新标题,变更标签【1.一般变更,2.风险变更 ...】,业务时间,旧数据
+   */
+  def handle(rowkey: String, oldMap: Map[String, String], newMap: Map[String, String]): (String, String, String, Map[String, String], String, String, String, String, Map[String, String]) = {
+    if(getBizTime(newMap)==null){
+      return null
+    }
+    if (oldMap == null) {
+      (rowkey, getCid(rowkey, newMap), "insert", newMap, null, getInsertTitle(newMap), getLabel(oldMap, newMap), getBizTime(newMap), null)
+    } else {
+      val t = getEquAndFields(oldMap, newMap)
+      if (t._1) {
+        null
+      } else {
+        (rowkey, getCid(rowkey, newMap), "update", newMap,
+          t._2
+          , getUpdateTitle(newMap), getLabel(oldMap, newMap), getBizTime(newMap), oldMap)
+      }
+    }
+  }
+
+  def getCid(rowkey: String, newMap: Map[String, String]): String = rowkey.split("_")(0)
+
+  def getUpdateTitle(newMap: Map[String, String]): String
+
+  def getInsertTitle(newMap: Map[String, String]): String
+
+  def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String
+
+  def getBizTime(newMap: Map[String, String]): String
+
+  def getEquAndFields(oldMap: Map[String, String], newMap: Map[String, String]): (Boolean, String) = {
+    val tmp = equCols.map(f => {
+      (f, cleanup(newMap(f)).equals(cleanup(oldMap(f))))
+    })
+    val eq = tmp.map(_._2).reduce((a1, a2) => a1 && a2)
+    if (eq) {
+      (true, null)
+    } else {
+      (eq, tmp.filter(!_._2).map(_._1).mkString(","))
+    }
+  }
+
+
+  protected def getValueOrNull(value: String, callBack: String): String = {
+    if (StringUtils.isNotBlank(value)) {
+      callBack
+    } else {
+      null
+    }
+  }
+
+  def getCondition():String ={
+    ""
+  }
+}

+ 25 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/auction_tracking_list.scala

@@ -0,0 +1,25 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
+/**
+ * @Author: π
+ * @Date: 2020/8/11
+ * @Description:司法拍卖
+ */
+
+case class auction_tracking_list(equCols: Seq[String]) extends CompanyChangeHandle1 with Serializable {
+  override def getUpdateTitle(newMap: Map[String, String]): String = s"司法拍卖发生变更"
+
+  override def getInsertTitle(newMap: Map[String, String]): String = s"新增司法拍卖"
+
+  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = ChangeExtractUtils.getTags(newMap, "司法拍卖", Array("auction_items_id"))
+
+  override def getBizTime(newMap: Map[String, String]): String = {
+    DateUtils.getBizDate(newMap("end_time"), newMap("update_time"))
+  }
+
+
+}

+ 32 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/bankruptcy_open_case.scala

@@ -0,0 +1,32 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
+/**
+ * @Author: Yan Yongnian
+ * @Date: 2020/8/5
+ * @Description:
+ */
+
+
+//破产公告
+
+case class bankruptcy_open_case(equCols: Seq[String]) extends CompanyChangeHandle1 with Serializable {
+
+  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = {
+    val str = ChangeExtractUtils.getTags(newMap, "破产重整", Array("case_no", "case_type", "agency_court", "applicant", "respondent", "public_date"))
+    str
+  }
+
+  override def getBizTime(newMap: Map[String, String]): String = DateUtils.getNotNullStr(newMap("public_date"),newMap("update_time"))
+
+  override def getUpdateTitle(newMap: Map[String, String]): String = getValueOrNull(newMap("case_no"), s"${newMap("case_no")}破产重整发生变更")
+
+  override def getInsertTitle(newMap: Map[String, String]): String = getValueOrNull(newMap("case_no"), s"新增${newMap("case_no")}破产重整")
+
+
+
+}

+ 30 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_bid_list.scala

@@ -0,0 +1,30 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
+/**
+ * @Author: Yan Yongnian
+ * @Date: 2020/7/31
+ * @Description:
+ */
+
+
+//招投标
+
+case class company_bid_list(equCols: Seq[String]) extends CompanyChangeHandle1 {
+  override def getUpdateTitle(newMap: Map[String, String]): String = getValueOrNull(newMap("title"), s"${newMap("title")}招投标信息发生变更")
+
+  override def getInsertTitle(newMap: Map[String, String]): String = getValueOrNull(newMap("title"), s"新增${newMap("title")}招投标信息")
+
+  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = ChangeExtractUtils.getTags(newMap, "招投标", Array("publish_time", "title", "purchaser", "province", "abs"))
+
+  override def getBizTime(newMap: Map[String, String]): String = DateUtils.getNotNullStr(newMap("publish_time"),newMap("update_time"))
+
+  override def getCondition(): String = {
+
+    " AND publish_time >= '2020-01-01'"
+  }
+}

+ 25 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_copyright_reg_list.scala

@@ -0,0 +1,25 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.ChangeExtractUtils
+
+/**
+ * @Author: XuJiakai
+ * @Date: 2020/7/9 16:44
+ * @Description:
+ */
+
+
+//软件著作权
+
+case class company_copyright_reg_list(equCols: Seq[String]) extends CompanyChangeHandle1 {
+  override def getUpdateTitle(newMap: Map[String, String]): String = getValueOrNull(newMap("full_name"), s"${newMap("full_name")}软件著作权发生变更")
+
+  override def getInsertTitle(newMap: Map[String, String]): String = getValueOrNull(newMap("full_name"), s"新增${newMap("full_name")}软件著作权")
+
+  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = ChangeExtractUtils.get_ip_tags("软件著作权", newMap("full_name"), newMap("reg_time"), newMap("reg_num"))
+
+  override def getBizTime(newMap: Map[String, String]): String = newMap("reg_time")
+}

+ 25 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_copyright_works_list.scala

@@ -0,0 +1,25 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.ChangeExtractUtils
+
+/**
+ * @Author: XuJiakai
+ * @Date: 2020/7/9 16:44
+ * @Description:
+ */
+
+
+//作品著作权
+
+case class company_copyright_works_list(equCols: Seq[String]) extends CompanyChangeHandle1 {
+  override def getUpdateTitle(newMap: Map[String, String]): String = getValueOrNull(newMap("name"), s"${newMap("name")}作品著作权发生变更")
+
+  override def getInsertTitle(newMap: Map[String, String]): String = getValueOrNull(newMap("name"), s"新增${newMap("name")}作品著作权")
+
+  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = ChangeExtractUtils.get_ip_tags("作品著作权", newMap("name"), newMap("reg_time"), newMap("reg_num"))
+
+  override def getBizTime(newMap: Map[String, String]): String = newMap("reg_time")
+}

+ 30 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_court_open_announcement_list.scala

@@ -0,0 +1,30 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
+/**
+ * @Author: π
+ * @Date: 2020/8/11
+ * @Description:开庭公告
+ */
+
case class company_court_open_announcement_list(equCols: Seq[String]) extends CompanyChangeHandle1 with Serializable {

  /** Title when an existing court-hearing announcement changes (null-guarded on case_no). */
  override def getUpdateTitle(newMap: Map[String, String]): String = {
    val caseNo = newMap("case_no")
    getValueOrNull(caseNo, s"${caseNo}开庭公告发生变更")
  }

  /** Title for a newly inserted court-hearing announcement. */
  override def getInsertTitle(newMap: Map[String, String]): String = {
    val caseNo = newMap("case_no")
    getValueOrNull(caseNo, s"新增${caseNo}开庭公告")
  }

  /** Tag label from case number and hearing start date. */
  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
    ChangeExtractUtils.getTags(newMap, "开庭公告", Array("case_no", "start_date"))

  /** Business time: start_date, falling back to update_time. */
  override def getBizTime(newMap: Map[String, String]): String =
    DateUtils.getBizDate(newMap("start_date"), newMap("update_time"))

  /** Extra SQL filter: only hearings starting in 2020 or later. */
  override def getCondition(): String = " AND start_date >= '2020-01-01'"
}

+ 20 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_finance.scala

@@ -0,0 +1,20 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.ChangeExtractUtils
+
+/**
+ * @Author: XuJiakai
+ * @Date: 2020/8/12 17:42
+ * @Description: 融资历史
+ */
case class company_finance(equCols: Seq[String]) extends CompanyChangeHandle1 {

  /** Fixed title for an updated financing record. */
  override def getUpdateTitle(newMap: Map[String, String]): String = "融资历史发生变化"

  /** Title for a new financing round: round name plus investors (empty string when absent). */
  override def getInsertTitle(newMap: Map[String, String]): String = {
    val round = newMap.getOrElse("round", "")
    // "inverstors" is the upstream column spelling — do not "fix" it here
    val investors = newMap.getOrElse("inverstors", "")
    s"获得了${round}融资,由${investors}投资"
  }

  /** Tag label over the main financing columns. */
  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
    ChangeExtractUtils.getTags(newMap, "融资历史", Array("company_name", "finance_time", "money", "round", "inverstors"))

  // NOTE(review): biz time reads "report_date" while the label uses "finance_time";
  // Map.apply throws NoSuchElementException if report_date is absent — confirm the column exists.
  override def getBizTime(newMap: Map[String, String]): String = newMap("report_date")
}

+ 20 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_holder.scala

@@ -0,0 +1,20 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.ChangeExtractUtils
+
+/**
+ * @Author: XuJiakai
+ * @Date: 2020/8/19 14:10
+ * @Description: 股东
+ */
case class company_holder(equCols: Seq[String]) extends CompanyChangeHandle1 {

  /** Fixed title: shareholder record updated. */
  override def getUpdateTitle(newMap: Map[String, String]): String = "股东发生更新"

  /** Fixed title: shareholder added. */
  override def getInsertTitle(newMap: Map[String, String]): String = "新增股东"

  /** Tag label over shareholder identity and capital columns. */
  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = {
    val cols = Array("holder_id", "holder_type", "amount", "capital", "capital_actual")
    ChangeExtractUtils.getTags(newMap, "股东信息", cols)
  }

  /** Business time is the row's update_time. */
  override def getBizTime(newMap: Map[String, String]): String = newMap("update_time")
}

+ 20 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_holder_v2.scala

@@ -0,0 +1,20 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.ChangeExtractUtils
+
+/**
+ * @Author: π
+ * @Date: 2020/12/10
+ * @Description: 股东新增-移除
+ */
case class company_holder_v2(equCols: Seq[String]) extends CompanyChangeHandle1 {

  /** Fixed title: shareholder removed (v2 semantics: update means removal). */
  override def getUpdateTitle(newMap: Map[String, String]): String = "股东移除"

  /** Fixed title: shareholder added. */
  override def getInsertTitle(newMap: Map[String, String]): String = "新增股东"

  /** Tag label over shareholder identity and capital columns. */
  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = {
    val cols = Array("holder_id", "holder_type", "amount", "capital", "capital_actual")
    ChangeExtractUtils.getTags(newMap, "股东信息", cols)
  }

  /** Business time is the row's update_time. */
  override def getBizTime(newMap: Map[String, String]): String = newMap("update_time")
}

+ 30 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_land_announcement.scala

@@ -0,0 +1,30 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
+/**
+ * @Author: Yan Yongnian
+ * @Date: 2020/8/4
+ * @Description:
+ */
+
+
+//购地信息
+
case class company_land_announcement(equCols: Seq[String]) extends CompanyChangeHandle1 {

  /** Title when an existing land-purchase record changes (null-guarded on project_name). */
  override def getUpdateTitle(newMap: Map[String, String]): String = {
    val projectName = newMap("project_name")
    getValueOrNull(projectName, s"${projectName}购地信息发生变更")
  }

  /** Title for a new land-purchase record. */
  override def getInsertTitle(newMap: Map[String, String]): String = {
    val projectName = newMap("project_name")
    getValueOrNull(projectName, s"新增${projectName}购地信息")
  }

  /** Tag label over the land-purchase columns. */
  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
    ChangeExtractUtils.getTags(newMap, "购地信息", Array("project_name", "project_loc", "area", "tran_price", "e_number"))

  /** Business time: actual_start_time, falling back to update_time. */
  override def getBizTime(newMap: Map[String, String]): String =
    DateUtils.getNotNullStr(newMap("actual_start_time"), newMap("update_time"))

  // NOTE(review): the filter keys on contract_date while biz time uses actual_start_time — confirm intended.
  override def getCondition(): String = " AND contract_date >= '2018-01-01'"
}

+ 31 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_land_mortgage.scala

@@ -0,0 +1,31 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
+/**
+ * @Author: Yan Yongnian
+ * @Date: 2020/8/4
+ * @Description:
+ */
+
+
+//土地抵押
+
case class company_land_mortgage(equCols: Seq[String]) extends CompanyChangeHandle1 {

  /** Title when an existing land-mortgage record changes (null-guarded on land_num). */
  override def getUpdateTitle(newMap: Map[String, String]): String = {
    val landNum = newMap("land_num")
    getValueOrNull(landNum, s"${landNum}土地抵押发生变更")
  }

  /** Title for a new land-mortgage record. */
  override def getInsertTitle(newMap: Map[String, String]): String = {
    val landNum = newMap("land_num")
    getValueOrNull(landNum, s"新增${landNum}土地抵押")
  }

  /** Tag label over the full set of land-mortgage columns. */
  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = {
    val cols = Array("land_mark", "land_num", "land_aministrative_area", "land_loc", "land_area", "use_right_num", "use_for", "area", "evaluate_amount", "mortgage_amount", "start_date", "end_date")
    ChangeExtractUtils.getTags(newMap, "土地抵押", cols)
  }

  /** Business time: start_date, falling back to update_time. */
  override def getBizTime(newMap: Map[String, String]): String =
    DateUtils.getNotNullStr(newMap("start_date"), newMap("update_time"))

  /** Extra SQL filter: only mortgages starting in 2018 or later. */
  override def getCondition(): String = " AND start_date >= '2018-01-01'"
}

+ 42 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_mortgage_info.scala

@@ -0,0 +1,42 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.ChangeExtractUtils
+
+/**
+ * @Author: Yan Yongnian
+ * @Date: 2020/8/10
+ * @Description:
+ */
+
+
+//动产抵押
+
case class company_mortgage_info(equCols: Seq[String]) extends CompanyChangeHandle1 with Serializable {

  /** Tag label over the chattel-mortgage columns. */
  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
    ChangeExtractUtils.getTags(newMap, "动产抵押", Array("reg_num", "reg_date", "publish_date", "amount", "overview_amount"))

  /**
   * Business time: publish_date, falling back to reg_date, then update_time —
   * the same precedence as the original nested null checks (orElse/getOrElse
   * evaluate their arguments lazily, so fallbacks are only read when needed).
   */
  override def getBizTime(newMap: Map[String, String]): String =
    Option(newMap("publish_date"))
      .orElse(Option(newMap("reg_date")))
      .getOrElse(newMap("update_time"))

  /** Title when an existing chattel-mortgage record changes (null-guarded on reg_num). */
  override def getUpdateTitle(newMap: Map[String, String]): String = {
    val regNum = newMap("reg_num")
    getValueOrNull(regNum, s"${regNum}动产抵押发生变更")
  }

  /** Title for a new chattel-mortgage record. */
  override def getInsertTitle(newMap: Map[String, String]): String = {
    val regNum = newMap("reg_num")
    getValueOrNull(regNum, s"新增${regNum}动产抵押")
  }
}

+ 25 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_patent_list.scala

@@ -0,0 +1,25 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.ChangeExtractUtils
+
+/**
+ * @Author: XuJiakai
+ * @Date: 2020/7/9 16:44
+ * @Description:
+ */
+
+
+//专利
+
case class company_patent_list(equCols: Seq[String]) extends CompanyChangeHandle1 {

  /** Title when an existing patent record changes (null-guarded on title). */
  override def getUpdateTitle(newMap: Map[String, String]): String = {
    val patentTitle = newMap("title")
    getValueOrNull(patentTitle, s"${patentTitle}专利发生变更")
  }

  /** Title for a newly inserted patent record. */
  override def getInsertTitle(newMap: Map[String, String]): String = {
    val patentTitle = newMap("title")
    getValueOrNull(patentTitle, s"新增${patentTitle}专利")
  }

  /** IP-style tag label from patent title, application date and application number. */
  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
    ChangeExtractUtils.get_ip_tags("专利", newMap("title"), newMap("app_date"), newMap("app_number"))

  /** Business time is the application date. */
  override def getBizTime(newMap: Map[String, String]): String = newMap("app_date")
}

+ 27 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_tm.scala

@@ -0,0 +1,27 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
+/**
+ * @Author: XuJiakai
+ * @Date: 2020/7/9 16:44
+ * @Description:
+ */
+
+
+//商标
+
case class company_tm(equCols: Seq[String]) extends CompanyChangeHandle1 {

  /** Title when an existing trademark record changes (null-guarded on tm_name). */
  override def getUpdateTitle(newMap: Map[String, String]): String = {
    val tmName = newMap("tm_name")
    getValueOrNull(tmName, s"${tmName}商标发生变更")
  }

  /** Title for a newly inserted trademark record. */
  override def getInsertTitle(newMap: Map[String, String]): String = {
    val tmName = newMap("tm_name")
    getValueOrNull(tmName, s"新增${tmName}商标")
  }

  /** IP-style tag label from trademark name, application date and registration number. */
  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
    ChangeExtractUtils.get_ip_tags("商标", newMap("tm_name"), newMap("app_date"), newMap("reg_no"))

  /** Business time: app_date, falling back to update_time. */
  override def getBizTime(newMap: Map[String, String]): String =
    DateUtils.getBizDate(newMap("app_date"), newMap("update_time"))
}

+ 21 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_zxr_list.scala

@@ -0,0 +1,21 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.ChangeExtractUtils
+
+//被执行人
case class company_zxr_list(equCols: Seq[String]) extends CompanyChangeHandle1 {

  /** Title when an existing enforced-person record changes (null-guarded on cname). */
  override def getUpdateTitle(newMap: Map[String, String]): String = {
    val companyName = newMap("cname")
    getValueOrNull(companyName, s"${companyName}被执行人发生变更")
  }

  /** Title for a new enforced-person record. */
  override def getInsertTitle(newMap: Map[String, String]): String = {
    val companyName = newMap("cname")
    getValueOrNull(companyName, s"新增${companyName}被执行人")
  }

  /** Tag label: case creation time, case number and enforced amount. */
  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
    ChangeExtractUtils.getTags(newMap, "成为被执行人", Array("case_create_time", "case_no", "exec_money"))

  /** Business time is the case creation time. */
  override def getBizTime(newMap: Map[String, String]): String = newMap("case_create_time")

  /** Extra SQL filter: only active (status = 1) records. */
  override def getCondition(): String = " AND status =1"
}

+ 30 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_zxr_restrict.scala

@@ -0,0 +1,30 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
+/**
+ * @Author: Yan Yongnian
+ * @Date: 2020/8/14
+ * @Description:限制消费令
+ */
+//限制消费令
case class company_zxr_restrict(equCols: Seq[String]) extends CompanyChangeHandle1 with Serializable {

  /** Tag label over identity and case columns of a consumption-restriction order. */
  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
    ChangeExtractUtils.getTags(newMap, "被限制高消费", Array("name", "identity_num", "court_name", "case_create_time", "case_no"))

  /** Business time: case_create_time, falling back to update_time. */
  override def getBizTime(newMap: Map[String, String]): String =
    DateUtils.getNotNullStr(newMap("case_create_time"), newMap("update_time"))

  /** Title when an existing restriction order changes (null-guarded on case_no). */
  override def getUpdateTitle(newMap: Map[String, String]): String = {
    val caseNo = newMap("case_no")
    getValueOrNull(caseNo, s"${caseNo}限制消费令发生变更")
  }

  /** Title for a new restriction order. */
  override def getInsertTitle(newMap: Map[String, String]): String = {
    val caseNo = newMap("case_no")
    getValueOrNull(caseNo, s"新增${caseNo}限制消费令")
  }

  /** Extra SQL filter: only active (status = 1) records. */
  override def getCondition(): String = " AND status =1"
}

+ 23 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/increase_registered_capital_info.scala

@@ -0,0 +1,23 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
+/**
+ * @Author: π
+ * @Date: 2020/8/11
+ * @Description:增资记录
+ */
+
case class increase_registered_capital_info(equCols: Seq[String]) extends CompanyChangeHandle1 with Serializable {

  /** Fixed update title, emitted only when change_item is non-null (getValueOrNull guard). */
  override def getUpdateTitle(newMap: Map[String, String]): String =
    getValueOrNull(newMap("change_item"), "注册资本发生变更")

  /** Fixed insert title, same change_item guard. */
  override def getInsertTitle(newMap: Map[String, String]): String =
    getValueOrNull(newMap("change_item"), "增资记录")

  /** Tag label: changed item and change time. */
  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
    ChangeExtractUtils.getTags(newMap, "增资记录", Array("change_item", "change_time"))

  /** Business time: change_time, falling back to update_time. */
  override def getBizTime(newMap: Map[String, String]): String =
    DateUtils.getBizDate(newMap("change_time"), newMap("update_time"))
}

+ 28 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/wenshu_detail_combine.scala

@@ -0,0 +1,28 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
+/**
+ * @Author: π
+ * @Date: 2020/8/18
+ * @Description:裁判文书
+ */
+
case class wenshu_detail_combine(equCols: Seq[String]) extends CompanyChangeHandle1 with Serializable {

  /** Both titles are the fixed string "裁判文书", guarded on cname being non-null. */
  override def getUpdateTitle(newMap: Map[String, String]): String =
    getValueOrNull(newMap("cname"), "裁判文书")

  override def getInsertTitle(newMap: Map[String, String]): String =
    getValueOrNull(newMap("cname"), "裁判文书")

  /** Tag label: case number and company name. */
  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
    ChangeExtractUtils.getTags(newMap, "裁判文书", Array("case_no", "cname"))

  /** Business time: judge_date, falling back to update_date. */
  override def getBizTime(newMap: Map[String, String]): String =
    DateUtils.getBizDate(newMap("judge_date"), newMap("update_date"))

  /** Extra SQL filter: only documents crawled since 2020. */
  override def getCondition(): String = " AND crawl_date >= '2020-01-01'"
}

+ 24 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/zxr_evaluate.scala

@@ -0,0 +1,24 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
+/**
+ * @Author: lyb
+ * @Date: 2021-01-07
+ * @Description:询价
+ */
+
case class zxr_evaluate(equCols: Seq[String]) extends CompanyChangeHandle1 with Serializable {

  /** Fixed update title, emitted only when name is non-null (getValueOrNull guard). */
  override def getUpdateTitle(newMap: Map[String, String]): String =
    getValueOrNull(newMap("name"), "询价评估发生变更")

  /** Fixed insert title, same name guard. */
  override def getInsertTitle(newMap: Map[String, String]): String =
    getValueOrNull(newMap("name"), "新增询价评估")

  /** Tag label over the appraisal-inquiry columns. */
  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
    ChangeExtractUtils.getTags(newMap, "询价评估", Array("case_no", "asset_type", "asset_name", "insert_time"))

  /** Business time: insert_time, falling back to update_time. */
  override def getBizTime(newMap: Map[String, String]): String =
    DateUtils.getBizDate(newMap("insert_time"), newMap("update_time"))
}

+ 24 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/zxr_evaluate_results.scala

@@ -0,0 +1,24 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
+/**
+ * @Author: lyb
+ * @Date: 2021-01-07
+ * @Description:询价
+ */
+
case class zxr_evaluate_results(equCols: Seq[String]) extends CompanyChangeHandle1 with Serializable {

  /** Fixed update title, emitted only when name is non-null (getValueOrNull guard). */
  override def getUpdateTitle(newMap: Map[String, String]): String =
    getValueOrNull(newMap("name"), "询价评估结果发生变更")

  /** Fixed insert title, same name guard. */
  override def getInsertTitle(newMap: Map[String, String]): String =
    getValueOrNull(newMap("name"), "新增询价评估结果")

  /** Tag label over the appraisal-result columns. */
  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
    ChangeExtractUtils.getTags(newMap, "询价评估结果", Array("case_no", "asset_type", "asset_name", "publish_time", "money"))

  /** Business time: publish_time, falling back to update_time. */
  override def getBizTime(newMap: Map[String, String]): String =
    DateUtils.getBizDate(newMap("publish_time"), newMap("update_time"))
}

+ 320 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/CompanyMonitor.scala

@@ -0,0 +1,320 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor
+
+import java.util.Date
+
+import com.winhc.bigdata.spark.config.EsConfig
+import com.winhc.bigdata.spark.jobs.dynamic.CompanyDynamicHandleUtils
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+import com.winhc.bigdata.spark.udf.BaseFunc
+import com.winhc.bigdata.spark.utils.BaseUtil.isWindows
+import com.winhc.bigdata.spark.utils.ReflectUtils.getClazz
+import com.winhc.bigdata.spark.utils.{AsyncExtract, LoggingUtils, SparkUtils}
+import org.apache.commons.lang3.time.DateFormatUtils
+import org.apache.spark.internal.Logging
+import org.apache.spark.sql.types.StringType
+import org.apache.spark.sql.{Row, SparkSession}
+
+import scala.annotation.meta.getter
+import scala.collection.immutable.ListMap
+import scala.collection.mutable
+
+/**
+ * @Author: π
+ * @Date: 2020/12/8
+ * @Description: 企业财产监控
+ */
/**
 * Company asset-monitoring job.
 *
 * Reads per-table change records from ads_change_extract_all for one ds
 * partition, routes each record through a reflectively-loaded, table-specific
 * CompanyMonitorHandle, and writes the results into ads_company_monitor,
 * partitioned by (ds, tn).
 */
object CompanyMonitor {
  // output environment and table name
  val env = "dev"
  val targetTab = "ads_company_monitor"

  case class CompanyMonitorUtil(s: SparkSession,
                                project: String, // project (workspace) that owns the source tables
                                ds: String // partition value processed by this run
                               ) extends LoggingUtils with Logging with BaseFunc {
    @(transient@getter) val spark: SparkSession = s


    // Creates the output table if missing. Currently NOT invoked from main()
    // (the call there is commented out), so the deployed table is assumed to exist.
    // NOTE(review): this CREATE TABLE declares 11 columns, but the INSERT
    // statements in calc() below write 12 values (they also include table_type
    // and type) — confirm against the real schema of the deployed table.
    def init(): Unit = {
      sql(
        s"""
           |CREATE TABLE IF NOT EXISTS ${getEnvProjectName(env, project)}.$targetTab
           |(
           |    id              STRING COMMENT '唯一标示',
           |    cid             STRING COMMENT '公司id',
           |    cname           STRING COMMENT '公司name',
           |    info_type       STRING COMMENT '变更分类,大类',
           |    flow_type       STRING COMMENT '财产流向',
           |    rta_desc        STRING COMMENT '变更信息描述,变更标题',
           |    change_time     STRING COMMENT '变更时间',
           |    biz_id          STRING COMMENT '业务id,数据行id',
           |    info_risk_level STRING COMMENT '变更风险等级',
           |    amt             STRING COMMENT '金额',
           |    create_time     STRING COMMENT '创建时间'
           |)
           |COMMENT '企业财务监控输出表'
           |PARTITIONED BY
           |(
           |    ds              STRING COMMENT '分区',
           |    tn              STRING COMMENT '表名'
           |)
           |LIFECYCLE 30
           |""".stripMargin)
    }

    // v2 handler names that read the same physical source table as the v1 handler
    val tabMapping =
      Map("wenshu_detail_combine_v2" -> "wenshu_detail_combine" // winning-suit cases
        , "company_land_mortgage_v2" -> "company_land_mortgage" // land mortgagee
      )

    // map a handler name to its physical source-table name (identity when unmapped)
    def trans(s: String): String = {
      var res = s
      if (tabMapping.contains(s)) {
        res = tabMapping(s)
      }
      res
    }

    // Process one handler/table (tableName carries no prefix/suffix).
    def calc(tableName: String
             , bName: Int = 0 // 1 = back-fill cname from base_company_mapping, 0 = leave null
            ): Unit = {
      // reflectively instantiate the per-table handler class
      val handle = getClazz[CompanyMonitorHandle](s"com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables.$tableName")

      val types = handle.org_type()
      val conditional = handle.get_conditional_filter()
      val tn = trans(tableName)

      val rdd = sql(
        bName match {
          // default: no cname back-fill needed
          case 0 =>
            s"""
               |SELECT  *,null AS cname
               |FROM    ${project}.ads_change_extract_all
               |WHERE   ds = '$ds'
               |AND     tn = '$tn'
               |AND     TYPE in (${types.map("'" + _ + "'").mkString(",")})
               |$conditional
               |""".stripMargin
          // back-fill cname from cid via base_company_mapping
          case 1 =>
            s"""
               |SELECT A.*,B.cname AS cname
               |FROM(
               |  SELECT  *
               |  FROM    ${project}.ads_change_extract_all
               |  WHERE   ds = '$ds'
               |  AND     tn = '$tn'
               |  AND     TYPE in (${types.map("'" + _ + "'").mkString(",")})
               |  $conditional
               |) AS A
               |LEFT JOIN (
               |    SELECT cid,cname FROM  $project.base_company_mapping
               |    WHERE ds = '${getLastPartitionsOrElse(project + ".base_company_mapping", "0")}'
               |) AS B
               |ON A.cid = B.cid
               |""".stripMargin

        })
        .rdd.flatMap(r => {
        val rowkey = r.getAs[String]("rowkey")
        val cid = r.getAs[String]("cid")
        val new_data = r.getAs[Map[String, String]]("data")
        val old_data = r.getAs[Map[String, String]]("old_data")
        val biz_date = r.getAs[String]("biz_date")
        val fields = r.getAs[String]("fields")
        val cname = r.getAs[String]("cname")

        // derive a create time: update_time/update_date first,
        // then create_time/crawl_date, finally biz_date
        var createTime = ""
        if(new_data.contains("update_time")){
          createTime = new_data("update_time")
        }else if(new_data.contains("update_date")){
          createTime = new_data("update_date")
        }

        if(createTime == null || createTime.trim.equals("") || createTime.equalsIgnoreCase("null")){
          if(new_data.contains("create_time")){
            createTime = new_data("create_time")
          }else if(new_data.contains("crawl_date")){
            createTime = new_data("crawl_date")
          }
        }

        if(createTime == null || createTime.trim.equals("") || createTime.equalsIgnoreCase("null")){
          createTime = biz_date
        }
        if(createTime == null || createTime.trim.equals("")|| createTime.equalsIgnoreCase("null")){
          // NOTE(review): this `None` is a discarded expression, not a return —
          // records with no usable createTime still fall through to handle(). Confirm intent.
          None
        }



        if (biz_date == null)
          // NOTE(review): same — `None` here is discarded; null-biz_date rows are NOT skipped.
          None
        val result = handle.handle(rowkey, biz_date, cid, if (fields == null) null else fields.split(","), old_data, new_data, cname)
        if (result == null) {
          None
        }
        else {
          // strip literal "null" fragments from the description before emitting;
          // the Row layout must match the ListMap schema below
          result.map(res => Row(CompanyDynamicHandleUtils.getDynamicId(res._1, res._5, res._7, res._6),
            res._1, res._2, res._3, res._4,
            res._5.replaceAll("null", ""), res._6, res._7, res._8, res._9,
            createTime, res._10))
        }
      })

      // all-string schema mirroring the Row built above
      val schema = getSchema(ListMap(
        "id" -> StringType
        , "cid" -> StringType
        , "cname" -> StringType
        , "table_type" -> StringType
        , "flow_type" -> StringType
        , "rta_desc" -> StringType
        , "change_time" -> StringType
        , "biz_id" -> StringType
        , "info_risk_level" -> StringType
        , "type" -> StringType
        , "create_time" -> StringType
        , "amt" -> StringType
      ))
      spark.createDataFrame(rdd, schema)
        .createOrReplaceTempView(s"company_monitor_tmp1_$tableName")

      // register the unescapeHtml4 UDF used by the INSERT below
      unescapeHtml4()

      // For restriction/dishonest tables, amt is derived by joining the judicial
      // case-relation tables (flag 5 = zxr_restrict, flag 3 = dishonest_info)
      // and taking the largest case_amt per id (converted to units of 10,000).
      if(tableName.equals("company_zxr_restrict") || tableName.equals("company_dishonest_info")){
        var dataFlag = 3
        if(tableName.equals("company_zxr_restrict")){
          dataFlag = 5
        }
        sql(
          s"""
             |INSERT ${if (isWindows) "INTO" else "OVERWRITE"} TABLE ${getEnvProjectName(env, project)}.$targetTab PARTITION(ds='$ds',tn='$tableName')
             |SELECT  id
             |        , cid
             |        , cname
             |        , table_type
             |        , flow_type
             |        , unescapeHtml4(rta_desc) rta_desc
             |        , change_time
             |        , biz_id
             |        , info_risk_level
             |        , type
             |        , create_time
             |        , if(case_amt is null, 0, case_amt/10000)
             |FROM
             |    (
             |     select c.*, d.case_amt,  ROW_NUMBER() OVER (PARTITION BY c.id  ORDER BY d.case_amt DESC)  num from
             |          ( SELECT a.*, b.judicase_id FROM  company_monitor_tmp1_$tableName a
             |            LEFT JOIN winhc_eci_dev.ads_judicial_case_relation_replace_cids b  ON a.biz_id = b.detail_id where b.ds = '${getLastPartitionsOrElse("winhc_eci_dev.ads_judicial_case_relation_replace_cids", "0")}' and b.flag = '$dataFlag'
             |          ) c LEFT JOIN winhc_eci.ads_judicial_case_relation_r1 d ON c.judicase_id = d.judicase_id
             |    )
             |WHERE id IS NOT NULL
             |AND   to_timestamp(change_time) <= now() and num = 1
             |""".stripMargin)
      }else{
        // all other tables: write the temp view through unchanged
        sql(
          s"""
             |INSERT ${if (isWindows) "INTO" else "OVERWRITE"} TABLE ${getEnvProjectName(env, project)}.$targetTab PARTITION(ds='$ds',tn='$tableName')
             |SELECT  id
             |        , cid
             |        , cname
             |        , table_type
             |        , flow_type
             |        , unescapeHtml4(rta_desc) rta_desc
             |        , change_time
             |        , biz_id
             |        , info_risk_level
             |        , type
             |        , create_time
             |        , amt
             |FROM
             |    company_monitor_tmp1_$tableName
             |WHERE id IS NOT NULL
             |AND   to_timestamp(change_time) <= now()
             |""".stripMargin)
      }

    }
  }

  // full list of handler tables processed when tableNames == "all"
  private val startArgs = Seq(
    Args(tableName = "wenshu_detail_combine_v2", bName = 1)
    , Args(tableName = "company_dishonest_info", bName = 1)
    , Args(tableName = "company_zxr_list", bName = 1)
    , Args(tableName = "company_zxr_restrict", bName = 1)
    , Args(tableName = "company_court_open_announcement_list", bName = 1)
    , Args(tableName = "wenshu_detail_combine", bName = 1)
    , Args(tableName = "company_equity_info_list", bName = 1)
    , Args(tableName = "company_land_mortgage", bName = 1)
    , Args(tableName = "company_land_announcement", bName = 1)
    , Args(tableName = "company_finance", bName = 1)
    , Args(tableName = "bankruptcy_open_case", bName = 1)
    , Args(tableName = "company_bid_list", bName = 1)
    , Args(tableName = "company_mortgage_info", bName = 1)
    , Args(tableName = "company_tm", bName = 1)
    , Args(tableName = "company_patent_list", bName = 1)
    , Args(tableName = "company_copyright_reg_list", bName = 1)
    , Args(tableName = "company_copyright_works_list", bName = 1)
    , Args(tableName = "company_holder_v2", bName = 1)
    //, Args(tableName = "company", bName = 1)
    , Args(tableName = "company_land_mortgage_v2", bName = 1)
    , Args(tableName = "auction_tracking_list", bName = 1)
    , Args(tableName = "increase_registered_capital_info", bName = 1)
    , Args(tableName = "zxr_evaluate", bName = 1)
    , Args(tableName = "zxr_evaluate_results", bName = 1)

  )

  // per-table run configuration
  private case class Args(project: String = "winhc_eci_dev"
                          , tableName: String
                          , bName: Int = 1
                          , aggs: Int = 0)

  /** Entry point: args = <project> <tableNames ("all" or comma-separated)> <ds>. */
  def main(args: Array[String]): Unit = {


    if (args.length != 3) {
      println(
        s"""
           |Please enter the legal parameters !
           |<project> <tableNames> <ds>
           |""".stripMargin)
      sys.exit(-99)
    }

    val Array(project, tableNames, ds) = args

    println(
      s"""
         |project: $project
         |tableNames: $tableNames
         |ds: $ds
         |""".stripMargin)

    val config = EsConfig.getEsConfigMap ++ mutable.Map(
      "spark.hadoop.odps.project.name" -> project,
      "spark.hadoop.odps.spark.local.partition.amt" -> "1000"
    )
    val spark = SparkUtils.InitEnv("CompanyMonitor", config)
    val cd = CompanyMonitorUtil(spark, project, ds)
    // table creation intentionally disabled; see init()
    //cd.init()

    // restrict to the requested tables unless "all" was passed
    var start = startArgs
    if (!tableNames.equals("all")) {
      val set = tableNames.split(",").toSet
      start = start.filter(a => set.contains(a.tableName))
    }

    // run each table asynchronously
    val a = start.map(e => (e.tableName, () => {
      e.aggs match {
        case _ => cd.calc(e.tableName, e.bName) // generic handling
      }
      true
    }))

    AsyncExtract.startAndWait(spark, a)
    spark.stop()
  }
}

+ 37 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/auction_tracking_list.scala

@@ -0,0 +1,37 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/12/14
+ * @Description: 司法拍卖
+ */
case class auction_tracking_list() extends CompanyMonitorHandle {

  /**
   * Human-readable description of a judicial-auction event:
   * title, starting price and auction end time.
   *
   * @param old_map previous row values (unused here)
   * @param new_map current row values; must contain auction_title, initial_price, end_time
   */
  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String =
    s"""标题:${new_map("auction_title")}
       |起拍价:${new_map("initial_price")}
       |拍卖时间:${new_map("end_time")}""".stripMargin


  //override def org_type(): Seq[String] = Seq("insert","update")


  /**
   * Amount in units of 10,000 with two decimals, parsed from query_price.
   *
   * Fix: the original read `new_map("query_price")` directly, which throws
   * NoSuchElementException when the key is absent even though the subsequent
   * null check shows the intent was to default to "0". Using Map.get treats a
   * missing key the same as a null value, so both now yield "0".
   */
  override protected def getMoney(new_map: Map[String, String]): String = {
    new_map.get("query_price").filter(_ != null) match {
      case None =>
        "0"
      case Some(raw) =>
        // normalize the textual amount, then convert to 万 (10,000) units
        (processMoney(raw).toDouble / 10000).formatted("%.2f")
    }
  }

}

+ 23 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/bankruptcy_open_case.scala

@@ -0,0 +1,23 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: 破产公告
+ */
case class bankruptcy_open_case() extends CompanyMonitorHandle {

  /**
   * Description of a bankruptcy-case announcement:
   * case number, applicant and publication date.
   *
   * @param old_map previous row values (unused here)
   * @param new_map current row values; must contain case_no, applicant, public_date
   */
  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
    val caseNo = new_map("case_no")
    val applicant = new_map("applicant")
    val publicDate = new_map("public_date")
    s"""案号:$caseNo
       |申请人:$applicant
       |公开日期:$publicDate""".stripMargin
  }

}

+ 77 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company.scala

@@ -0,0 +1,77 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+import com.winhc.bigdata.spark.utils.RegCapitalAmount
+import org.apache.commons.lang3.StringUtils
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: Business-registration (工商) data — emits a monitor row
+ *               only when the company's registered capital increased.
+ */
+case class company() extends CompanyMonitorHandle {
+  /**
+   * Description text for the event: registered capital and change time.
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; must contain the referenced keys
+   * @return multi-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String =
+    s"""注册资本:${new_map("reg_capital")}
+       |变更时间:${new_map("update_time")}""".stripMargin
+
+  // Registered-capital increase: short-circuit unless reg_capital grew.
+  override def handle(rowkey: String, bizDate: String, cid: String, change_fields: Seq[String], old_map: Map[String, String], new_map: Map[String, String], cname: String = null): Seq[(String, String, String, String, String, String, String, String, String, String)] = {
+    if (!compareAmount(old_map, new_map)) {
+      return Seq.empty
+    }
+    try {
+      // 10-tuple consumed downstream, positional:
+      // (cid, cname, table_type, flow_type, rta_desc, change_time,
+      //  biz_id, info_risk_level, type, money)
+      Seq((cid
+        , cname
+        , get_table_type()
+        , get_flow_type()
+        , get_rta_desc(old_map, new_map)
+        , get_change_time(bizDate, new_map)
+        , get_biz_id(rowkey, new_map)
+        , get_info_risk_level(old_map, new_map)
+        , get_type()
+        , getMoney(new_map)
+      ))
+    } catch {
+      case e: Exception => {
+        logError(e.getMessage, e)
+      }
+        // Failures are swallowed: log and emit nothing for this record.
+        Seq.empty
+    }
+  }
+
+  /**
+   * Source-table change types handled (insert or update).
+   * Capital can only "increase" relative to a previous value, so only
+   * "update" rows are considered.
+   *
+   * @return
+   */
+  override def org_type(): Seq[String] = {
+    Seq("update")
+  }
+
+  /**
+   * Registered-capital comparison: true only when both maps are non-null,
+   * both "reg_capital" values are non-blank, both parse through
+   * RegCapitalAmount.getAmount, and the new amount is strictly larger.
+   */
+  def compareAmount(old_map: Map[String, String], new_map: Map[String, String]): Boolean = {
+    if (old_map == null || new_map == null) {
+      return false
+    }
+    val old = old_map("reg_capital")
+    val now = new_map("reg_capital")
+    if (StringUtils.isBlank(old) || StringUtils.isBlank(now)) {
+      return false
+    }
+    val old_num = RegCapitalAmount.getAmount(old)
+    val now_num = RegCapitalAmount.getAmount(now)
+    if (StringUtils.isBlank(old_num) || StringUtils.isBlank(now_num)) {
+      return false
+    }
+    old_num.toDouble < now_num.toDouble
+  }
+
+}

+ 21 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_bid_list.scala

@@ -0,0 +1,21 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: Bidding / tendering (招投标) monitor handler.
+ */
+case class company_bid_list() extends CompanyMonitorHandle {
+
+  /**
+   * Description text for a bid record: just its title.
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; "title" must be present
+   * @return single-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    s"标题:${new_map("title")}"
+  }
+
+}

+ 24 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_copyright_reg_list.scala

@@ -0,0 +1,24 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: Software copyright registration (软件著作权) monitor handler.
+ */
+case class company_copyright_reg_list() extends CompanyMonitorHandle {
+
+  /**
+   * Description text: registration number, software full name and
+   * short name, one field per line.
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; referenced keys must be present
+   * @return multi-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    Seq(
+      s"登记号:${new_map("reg_num")}",
+      s"软件名称:${new_map("full_name")}",
+      s"软件简称:${new_map("simple_name")}"
+    ).mkString("\n")
+  }
+
+}

+ 24 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_copyright_works_list.scala

@@ -0,0 +1,24 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: Works copyright (著作权) monitor handler.
+ */
+case class company_copyright_works_list() extends CompanyMonitorHandle {
+
+  /**
+   * Description text: work name, category and registration date,
+   * one field per line.
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; referenced keys must be present
+   * @return multi-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    Seq(
+      s"作品名称:${new_map("name")}",
+      s"作品类别:${new_map("type")}",
+      s"登记日期:${new_map("reg_time")}"
+    ).mkString("\n")
+  }
+
+}

+ 29 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_court_open_announcement_list.scala

@@ -0,0 +1,29 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: Court-opening announcements for resumed-enforcement
+ *               cases (有恢复执行案件即将开庭) monitor handler.
+ */
+case class company_court_open_announcement_list() extends CompanyMonitorHandle {
+
+  /**
+   * Description text: plaintiff, defendant, case number and cause of
+   * action, one field per line.
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; referenced keys must be present
+   * @return multi-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    Seq(
+      s"上诉人/原告:${new_map("plaintiff")}",
+      s"被上诉人/被告:${new_map("defendant")}",
+      s"案号:${new_map("case_no")}",
+      s"案由:${new_map("case_reason")}"
+    ).mkString("\n")
+  }
+
+  /**
+   * SQL predicate restricting rows to resumed-enforcement cases
+   * (case number containing '恢').
+   */
+  override def get_conditional_filter(): String =
+    "AND  data['case_no'] like concat('%','恢','%')"
+
+}

+ 24 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_dishonest_info.scala

@@ -0,0 +1,24 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: Company dishonesty (企业失信) monitor handler.
+ */
+case class company_dishonest_info() extends CompanyMonitorHandle {
+
+  /**
+   * Description text: case number, performance status, publication
+   * date and dishonest conduct, one field per line.
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; referenced keys must be present
+   * @return multi-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    Seq(
+      s"案号:${new_map("case_no")}",
+      s"履行情况:${new_map("performance")}",
+      s"发布日期:${new_map("pub_date")}",
+      s"失信行为:${new_map("action_content")}"
+    ).mkString("\n")
+  }
+
+}

+ 34 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_equity_info_list.scala

@@ -0,0 +1,34 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: Equity pledgor (股权出质人) monitor handler.
+ */
+case class company_equity_info_list() extends CompanyMonitorHandle {
+
+  /**
+   * Description text: pledge registration date, pledgee, target
+   * company and pledged equity amount, one field per line.
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; referenced keys must be present
+   * @return multi-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    Seq(
+      s"股权出质登记日期:${new_map("reg_date")}",
+      s"质权人:${new_map("pledgee")}",
+      s"出质股权标的企业:${new_map("target")}",
+      s"出质股权数额:${new_map("equity_amount")}"
+    ).mkString("\n")
+  }
+
+  /** SQL predicate: only rows of type '1' (pledgor side). */
+  override def get_conditional_filter(): String =
+    "AND data['type'] = '1'"
+
+  /** Monetary amount: the pledged equity amount, normalised by processMoney. */
+  override protected def getMoney(new_map: Map[String, String]): String =
+    processMoney(new_map("equity_amount"))
+
+}

+ 23 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_finance.scala

@@ -0,0 +1,23 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: Financing (融资信息) monitor handler.
+ */
+case class company_finance() extends CompanyMonitorHandle {
+
+  /**
+   * Description text: financing amount, round and investors,
+   * one field per line.
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; referenced keys must be present
+   *                (note: the upstream column is spelled "inverstors")
+   * @return multi-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    Seq(
+      s"融资金额:${new_map("money")}",
+      s"轮次:${new_map("round")}",
+      s"投资人:${new_map("inverstors")}"
+    ).mkString("\n")
+  }
+
+}

+ 76 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_holder_v2.scala

@@ -0,0 +1,76 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: Shareholder (股东) monitor handler.
+ */
+case class company_holder_v2() extends CompanyMonitorHandle {
+
+  /**
+   * Source-table change types handled, insert or update.
+   *
+   * @return
+   */
+  override def org_type(): Seq[String] = Seq("insert", "update")
+
+  /**
+   * Description text: subscribed capital amount and subscription time.
+   * The tuple emitted by handle is positional:
+   * cid
+   * cname
+   * table_type
+   * flow_type
+   * rta_desc
+   * change_time
+   * biz_id
+   * info_risk_level
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; referenced keys must be present
+   * @return
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String =
+    s"""|认缴金额:${new_map("amount")}
+        |认缴出资时间:${new_map("update_time")}""".stripMargin
+
+  override def handle(rowkey: String, bizDate: String, cid: String, change_fields: Seq[String], old_map: Map[String, String], new_map: Map[String, String], cname: String = null): Seq[(String, String, String, String, String, String, String, String, String, String)] = {
+    // Defensive null check; the get_rta_desc above never returns null,
+    // so this guard only matters if a subclass overrides it.
+    val rta_desc = get_rta_desc(old_map, new_map)
+    if (rta_desc == null) {
+      return Seq.empty
+    }
+
+    try {
+      // flag == true when change_fields is null, i.e. the row is treated
+      // as a newly added holder; false otherwise.
+      var flag = false
+      if (change_fields == null) {
+        flag = true
+      }
+      Seq((cid
+        , cname
+        //, if (flag) "company_holder_add" else "company_holder_deleted"
+        // NOTE(review): both branches yield "10" — this table_type no longer
+        // distinguishes added vs removed holders; confirm intended value.
+        , if (flag) "10" else "10"
+        , if (flag) "2" else "1"
+        , rta_desc
+        , get_change_time(bizDate, new_map)
+        , get_biz_id(rowkey, new_map)
+        , if (flag) "2" else "1"
+        , if (flag) "10" else "11"
+        , getMoney(new_map)
+      ))
+    } catch {
+      case e: Exception => {
+        logError(e.getMessage, e)
+      }
+        // Failures are swallowed: log and emit nothing for this record.
+        Seq.empty
+    }
+  }
+
+  // Monetary amount: the subscribed capital, normalised by processMoney.
+  override protected def getMoney(new_map: Map[String, String]): String = {
+
+    var amtstr = new_map("amount")
+    amtstr = processMoney(amtstr)
+    amtstr
+  }
+
+}

+ 31 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_land_announcement.scala

@@ -0,0 +1,31 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: Land purchase (购地信息) monitor handler.
+ */
+case class company_land_announcement() extends CompanyMonitorHandle {
+
+  /**
+   * Description text: project name, area (hectares), transaction
+   * price (万元) and contract date, one field per line.
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; referenced keys must be present
+   * @return multi-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    Seq(
+      s"项目名称:${new_map("project_name")}",
+      s"面积(公顷):${new_map("area")}",
+      s"成交价格(万元):${new_map("tran_price")}",
+      s"合同签订日期:${new_map("contract_date")}"
+    ).mkString("\n")
+  }
+
+  /** Monetary amount: the transaction price, normalised by processMoney. */
+  override protected def getMoney(new_map: Map[String, String]): String =
+    processMoney(new_map("tran_price"))
+
+}

+ 35 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_land_mortgage.scala

@@ -0,0 +1,35 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: Land mortgage — mortgagor side (土地抵押-抵押人) handler.
+ */
+case class company_land_mortgage() extends CompanyMonitorHandle {
+
+  /**
+   * Description text: administrative area, land area, mortgaged area,
+   * appraised amount and mortgage amount, one field per line.
+   * (The upstream column is spelled "land_aministrative_area".)
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; referenced keys must be present
+   * @return multi-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    Seq(
+      s"所在行政区:${new_map("land_aministrative_area")}",
+      s"土地面积(公顷):${new_map("land_area")}",
+      s"抵押面积(公顷):${new_map("area")}",
+      s"评估金额(万元):${new_map("evaluate_amount")}",
+      s"抵押金额(万元):${new_map("mortgage_amount")}"
+    ).mkString("\n")
+  }
+
+  /** SQL predicate: only mortgagor-side rows. */
+  override def get_conditional_filter(): String =
+    "AND  data['type'] in ('bothone','mortgagor') "
+
+  /** Monetary amount: the mortgage amount, normalised by processMoney. */
+  override protected def getMoney(new_map: Map[String, String]): String =
+    processMoney(new_map("mortgage_amount"))
+
+}

+ 35 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_land_mortgage_v2.scala

@@ -0,0 +1,35 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: Land mortgage — mortgagee side (土地抵押-抵押权人) handler.
+ */
+case class company_land_mortgage_v2() extends CompanyMonitorHandle {
+
+  /**
+   * Description text: administrative area, land area, mortgaged area,
+   * appraised amount and mortgage amount, one field per line.
+   * (The upstream column is spelled "land_aministrative_area".)
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; referenced keys must be present
+   * @return multi-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    Seq(
+      s"所在行政区:${new_map("land_aministrative_area")}",
+      s"土地面积(公顷):${new_map("land_area")}",
+      s"抵押面积(公顷):${new_map("area")}",
+      s"评估金额(万元):${new_map("evaluate_amount")}",
+      s"抵押金额(万元):${new_map("mortgage_amount")}"
+    ).mkString("\n")
+  }
+
+  /** SQL predicate: only mortgagee-side rows. */
+  override def get_conditional_filter(): String =
+    "AND  data['type'] in ('bothtwo','mortgagee') "
+
+  /** Monetary amount: the mortgage amount, normalised by processMoney. */
+  override protected def getMoney(new_map: Map[String, String]): String =
+    processMoney(new_map("mortgage_amount"))
+
+}

+ 31 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_mortgage_info.scala

@@ -0,0 +1,31 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: Chattel mortgage (动产抵押) monitor handler.
+ */
+case class company_mortgage_info() extends CompanyMonitorHandle {
+
+  /**
+   * Description text: registration number, secured debt amount,
+   * registration authority and status, one field per line.
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; referenced keys must be present
+   * @return multi-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    Seq(
+      s"登记编号:${new_map("reg_num")}",
+      s"被担保债权数额:${new_map("amount")}",
+      s"登记机关:${new_map("reg_department")}",
+      s"状态:${new_map("status")}"
+    ).mkString("\n")
+  }
+
+  /** Monetary amount: the secured debt amount, normalised by processMoney. */
+  override protected def getMoney(new_map: Map[String, String]): String =
+    processMoney(new_map("amount"))
+
+}

+ 25 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_patent_list.scala

@@ -0,0 +1,25 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: Patent (专利) monitor handler.
+ */
+case class company_patent_list() extends CompanyMonitorHandle {
+
+  /**
+   * Description text: application number, publication number, patent
+   * title and publication date, one field per line.
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; referenced keys must be present
+   * @return multi-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    Seq(
+      s"申请号:${new_map("app_number")}",
+      s"公开公告号:${new_map("pub_number")}",
+      s"专利名称:${new_map("title")}",
+      s"公开公告日:${new_map("pub_date")}"
+    ).mkString("\n")
+  }
+
+}

+ 25 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_tm.scala

@@ -0,0 +1,25 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: Trademark (商标) monitor handler.
+ */
+case class company_tm() extends CompanyMonitorHandle {
+
+  /**
+   * Description text: trademark name, application date, registration
+   * number and applicant, one field per line.
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; referenced keys must be present
+   * @return multi-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    Seq(
+      s"商标名:${new_map("tm_name")}",
+      s"申请日期:${new_map("app_date")}",
+      s"注册号:${new_map("reg_no")}",
+      s"申请人:${new_map("applicant_cn")}"
+    ).mkString("\n")
+  }
+
+}

+ 36 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_zxr_list.scala

@@ -0,0 +1,36 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: Company under enforcement (企业被执) monitor handler.
+ */
+case class company_zxr_list() extends CompanyMonitorHandle {
+
+  /**
+   * Description text: update date, enforcement target amount, case
+   * number and filing date, one field per line.
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; referenced keys must be present
+   * @return multi-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    Seq(
+      s"更新日期:${new_map("update_time")}",
+      s"执行标的:${new_map("exec_money")}",
+      s"案号:${new_map("case_no")}",
+      s"立案日期:${new_map("case_create_time")}"
+    ).mkString("\n")
+  }
+
+  /**
+   * Monetary amount: `exec_money` normalised by processMoney, divided
+   * by 10000 (presumably yuan → 万元 — confirm) and formatted to two
+   * decimals. Throws NumberFormatException if the normalised value is
+   * not numeric, same as the original implementation.
+   */
+  override protected def getMoney(new_map: Map[String, String]): String =
+    (processMoney(new_map("exec_money")).toDouble / 10000).formatted("%.2f")
+
+}

+ 27 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_zxr_restrict.scala

@@ -0,0 +1,27 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: Consumption restriction order (限制消费令) monitor handler.
+ */
+case class company_zxr_restrict() extends CompanyMonitorHandle {
+
+  /**
+   * Description text: case number, company name, restricted person,
+   * enforcing court and filing date, one field per line.
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; referenced keys must be present
+   * @return multi-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    Seq(
+      s"案号:${new_map("case_no")}",
+      s"企业名称:${new_map("company_name")}",
+      s"限制消费人名称:${new_map("name")}",
+      s"执行法院名称:${new_map("court_name")}",
+      s"立案日期:${new_map("case_create_time")}"
+    ).mkString("\n")
+  }
+
+}

+ 32 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/increase_registered_capital_info.scala

@@ -0,0 +1,32 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/12/14
+ * @Description: Capital increase record (增资记录) monitor handler.
+ */
+case class increase_registered_capital_info() extends CompanyMonitorHandle {
+
+  /**
+   * Description text: capital after change, capital before change and
+   * change time, one field per line.
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; referenced keys must be present
+   * @return multi-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    Seq(
+      s"变更后资本:${new_map("content_after")}",
+      s"变更前资本:${new_map("content_before")}",
+      s"变更时间:${new_map("change_time")}"
+    ).mkString("\n")
+  }
+
+  /** Monetary amount: the post-change capital, normalised by processMoney. */
+  override protected def getMoney(new_map: Map[String, String]): String =
+    processMoney(new_map("content_after"))
+
+}

+ 47 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/wenshu_detail_combine.scala

@@ -0,0 +1,47 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: Resumed-enforcement case judgments (有恢复执行案件-文书) handler.
+ */
+case class wenshu_detail_combine() extends CompanyMonitorHandle {
+
+  /**
+   * Description text: case title, case number, court and judgment
+   * date, one field per line.
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; referenced keys must be present
+   * @return multi-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    Seq(
+      s"案件名称:${new_map("title")}",
+      s"案号:${new_map("case_no")}",
+      s"法院:${new_map("court_name")}",
+      s"判决日期:${new_map("judge_date")}"
+    ).mkString("\n")
+  }
+
+  /**
+   * SQL predicate restricting rows to resumed-enforcement cases
+   * (case number containing '恢').
+   */
+  override def get_conditional_filter(): String =
+    "AND  data['case_no'] like concat('%','恢','%')"
+
+  /**
+   * Business id: the record's case_id rather than the default rowkey.
+   *
+   * @param rowkey source rowkey (ignored)
+   * @return the "case_id" value from new_map
+   */
+  override def get_biz_id(rowkey: String, new_map: Map[String, String]): String =
+    new_map("case_id")
+
+  /** Monetary amount: the case amount, normalised by processMoney. */
+  override protected def getMoney(new_map: Map[String, String]): String =
+    processMoney(new_map("case_amt"))
+
+}

+ 51 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/wenshu_detail_combine_v2.scala

@@ -0,0 +1,51 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: Newly won lawsuits as plaintiff (新增胜诉案件(原告)) handler.
+ */
+case class wenshu_detail_combine_v2() extends CompanyMonitorHandle {
+
+  /**
+   * Description text: case title, case number, court and judgment
+   * date, one field per line.
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; referenced keys must be present
+   * @return multi-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    Seq(
+      s"案件名称:${new_map("title")}",
+      s"案号:${new_map("case_no")}",
+      s"法院:${new_map("court_name")}",
+      s"判决日期:${new_map("judge_date")}"
+    ).mkString("\n")
+  }
+
+  /**
+   * SQL predicate: first-instance civil cases won by the company
+   * acting as plaintiff (name_type = 'y').
+   */
+  override def get_conditional_filter(): String =
+    "AND data['is_success'] = '胜'  AND data['case_stage']= '一审'  AND  data['case_type'] = '民事案件' AND  data['name_type'] = 'y'"
+
+  /**
+   * Business id: the record's case_id rather than the default rowkey.
+   *
+   * @param rowkey source rowkey (ignored)
+   * @return the "case_id" value from new_map
+   */
+  override def get_biz_id(rowkey: String, new_map: Map[String, String]): String =
+    new_map("case_id")
+
+  /** Monetary amount: the case amount, normalised by processMoney. */
+  override protected def getMoney(new_map: Map[String, String]): String =
+    processMoney(new_map("case_amt"))
+
+}

+ 24 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/zxr_evaluate.scala

@@ -0,0 +1,24 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2021/1/07 18:50
+ * @Description: Asset inquiry & appraisal (询价评估) monitor handler.
+ */
+case class zxr_evaluate() extends CompanyMonitorHandle {
+
+  /**
+   * Description text: case number, asset type, asset name and insert
+   * date, one field per line.
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; referenced keys must be present
+   * @return multi-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    Seq(
+      s"案号:${new_map("case_no")}",
+      s"财产类型:${new_map("asset_type")}",
+      s"财产名称:${new_map("asset_name")}",
+      s"日期:${new_map("insert_time")}"
+    ).mkString("\n")
+  }
+
+}

+ 35 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/zxr_evaluate_results.scala

@@ -0,0 +1,35 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2021/1/07 18:50
+ * @Description: Asset inquiry & appraisal results (询价评估结果) handler.
+ */
+case class zxr_evaluate_results() extends CompanyMonitorHandle {
+  /**
+   * Description text: case number, asset type, asset name and publish
+   * date, one field per line.
+   *
+   * @param old_map previous record values (unused)
+   * @param new_map current record values; referenced keys must be present
+   * @return multi-line description text
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String =
+    s"""案号:${new_map("case_no")}
+       |财产类型:${new_map("asset_type")}
+       |财产名称:${new_map("asset_name")}
+       |日期:${new_map("publish_time")}""".stripMargin
+
+  /**
+   * Monetary amount: "money" looks like a JSON-ish list (e.g. ["1.0","2.0"]
+   * — TODO confirm against the producer). Strips quotes/brackets, averages
+   * the numeric tokens and divides by 10000, formatted to two decimals.
+   * Returns "0" when the value is null, blank, or has no numeric token —
+   * previously a blank string crashed with NumberFormatException because
+   * "".split(",") yields Array(""), and any malformed token also threw.
+   */
+  override protected def getMoney(new_map: Map[String, String]): String = {
+    val amtstr = new_map("money")
+    if (amtstr == null) {
+      "0"
+    } else {
+      // Keep only tokens that actually parse as doubles.
+      val nums = amtstr.replaceAll("\"|\\[|\\]", "").split(",")
+        .map(_.trim)
+        .flatMap(s => scala.util.Try(s.toDouble).toOption)
+      if (nums.isEmpty) "0"
+      else (nums.sum / nums.length / 10000).formatted("%.2f")
+    }
+  }
+}
+}