Case-source opportunities v8 (案源机会v8)

xufei committed 2 years ago · commit c4e61d5cc1
33 changed files with 2062 additions and 27 deletions
  1. 29 18
      src/main/scala/com/winhc/bigdata/spark/const/CaseChanceConst.scala
  2. 137 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/CompanyChanceHandle.scala
  3. 106 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/CompanyChanceRecord.scala
  4. 33 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/CompanyChanceUtils.scala
  5. 473 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/CompanyCourtAnnouncementV2.scala
  6. 363 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/GoodNewsV2.scala
  7. 51 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/bankruptcy_open_case.scala
  8. 32 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_bid.scala
  9. 33 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_certificate.scala
  10. 32 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_copyright_reg.scala
  11. 33 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_copyright_works.scala
  12. 54 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_court_announcement.scala
  13. 51 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_court_announcement_v1.scala
  14. 34 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_court_open_announcement.scala
  15. 32 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_dishonest_info.scala
  16. 33 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_employment.scala
  17. 35 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_equity_info.scala
  18. 32 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_holder.scala
  19. 33 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_land_announcement.scala
  20. 33 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_land_publicity.scala
  21. 32 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_land_transfer.scala
  22. 32 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_patent.scala
  23. 32 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_tm.scala
  24. 32 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_zxr.scala
  25. 32 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_zxr_restrict.scala
  26. 34 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/wenshu_detail_v2.scala
  27. 53 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/wenshu_detail_v2_bg_yishen.scala
  28. 48 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/wenshu_detail_v2_yg_yishen.scala
  29. 53 0
      src/main/scala/com/winhc/bigdata/spark/ng/chance/table/wenshu_detail_v2_yg_zhongben.scala
  30. 1 0
      src/main/scala/com/winhc/bigdata/spark/ng/change/NgChangeExtractArgs.scala
  31. 12 0
      src/main/scala/com/winhc/bigdata/spark/ng/change/table/company_employment.scala
  32. 4 0
      src/main/scala/com/winhc/bigdata/spark/udf/BaseFunc.scala
  33. 38 9
      src/main/scala/com/winhc/bigdata/spark/utils/BaseUtil.scala

+ 29 - 18
src/main/scala/com/winhc/bigdata/spark/const/CaseChanceConst.scala

@@ -10,52 +10,63 @@ object CaseChanceConst {
   val TABLE_2_TYPE = Map(
     "" -> "1"
     , "" -> "2"
-    , "company_bid_list" -> "3"
+    , "company_bid" -> "3"
     , "company_land_transfer" -> "3"
     , "company_tm" -> "3"
-    , "company_patent_list" -> "3"
+    , "company_patent" -> "3"
     , "company_certificate" -> "3"
-    , "company_copyright_works_list" -> "3"
-    , "company_copyright_reg_list" -> "3"
+    , "company_copyright_works" -> "3"
+    , "company_copyright_reg" -> "3"
     , "company_employment" -> "3"
     , "company_land_announcement" -> "3"
     , "company_land_publicity" -> "3"
     , "company_dishonest_info" -> "3"//失信人
     , "company_zxr_restrict" -> "3"//限高
-    , "company_zxr_list" -> "3"//被执
-    , "company_court_open_announcement_list" -> "3"//开庭
-    , "wenshu_detail_combine" -> "3"//文书
+    , "company_zxr" -> "3"//被执
+    , "company_court_open_announcement" -> "3"//开庭
+    , "wenshu_detail_v2" -> "3"//文书
+    , "bankruptcy_open_case" -> "9"//破产
     , "company_holder" -> "3"//股东
-    , "company_equity_info_list" -> "3"//出质人
-    , "" -> "4"
+    , "company_equity_info" -> "3"//出质人
+    , "company_court_announcement" -> "7"//法院公告-原告
+    , "company_court_announcement_v1" -> "8"//法院公告-被告
+    , "wenshu_detail_v2_yg_yishen" -> "5"//裁判文书 - 原告 - 一审
+    , "wenshu_detail_v2_yg_zhongben" -> "6"//裁判文书 - 原告 - 终本
+    , "wenshu_detail_v2_bg_yishen" -> "4"//裁判文书 - 被告 - 一审
   )
 
  //detailed dynamic types for favorable news
   val CHANCE_DYNAMIC_TYPE = Map(
     /*"" -> "3-1" //企业增资
     , "" -> "3-2" //企业新增对外投资
-    , "company_bid_list" -> "3-3" //新增招投标
+    , "company_bid" -> "3-3" //新增招投标
     , "" -> "3-4" //新增招聘
     , "" -> "3-5" //地块公示
     , "" -> "3-6" //购地信息
     , "company_land_transfer" -> "3-7" //土地转让
     ,*/
-    "company_bid_list" -> "3" //新增招投标
+    "bankruptcy_open_case" -> "0" //破产公告
+    ,"company_bid" -> "3" //新增招投标
     , "company_employment" -> "4" //新增招聘
     , "company_land_publicity" -> "5" //地块公示
     , "company_land_announcement" -> "6" //购地信息
     , "company_land_transfer" -> "7" //土地转让
     , "company_tm" -> "8" //知识产权-商标
-    , "company_patent_list" -> "9" //专利
+    , "company_patent" -> "9" //专利
     , "company_certificate" -> "10" //资质证书   X
-    , "company_copyright_works_list" -> "11" //作品著作权
-    , "company_copyright_reg_list" -> "12" //软件著作权
+    , "company_copyright_works" -> "11" //作品著作权
+    , "company_copyright_reg" -> "12" //软件著作权
     , "company_dishonest_info" -> "13" //失信人
     , "company_zxr_restrict" -> "14" //限高
-    , "company_zxr_list" -> "15" //被执
-    , "company_court_open_announcement_list" -> "16" //开庭
-    , "wenshu_detail_combine" -> "17" //文书
+    , "company_zxr" -> "15" //被执
+    , "company_court_open_announcement" -> "16" //开庭
+    , "company_court_announcement" -> "0" //法院公告-原告
+    , "company_court_announcement_v1" -> "0" //法院公告-被告
+    , "wenshu_detail_v2_yg_yishen" -> "0" //裁判文书 - 原告 - 一审
+    , "wenshu_detail_v2_yg_zhongben" -> "0" //裁判文书 - 原告 - 终本
+    , "wenshu_detail_v2_bg_yishen" -> "0" //裁判文书 - 被告 - 一审
+    , "wenshu_detail_v2" -> "17" //文书 - 恢复执行
     , "company_holder" -> "18" //股东
-    , "company_equity_info_list" -> "19" //出质人
+    , "company_equity_info" -> "19" //出质人
   )
 }

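For orientation, a REPL-style sketch (not part of the commit) of how the two maps above resolve a table name; these constants presumably back the get_table_type / get_chance_dynamic_type UDFs used by the new GoodNewsV2 job, and the "0" fallback here is only an assumption:

    import com.winhc.bigdata.spark.const.CaseChanceConst

    val tn = "company_court_announcement"
    // case-chance type ("7" = court announcement - plaintiff, per the map above)
    CaseChanceConst.TABLE_2_TYPE.getOrElse(tn, "0")        // "7"
    // detailed dynamic type ("0" for the court-announcement handlers)
    CaseChanceConst.CHANCE_DYNAMIC_TYPE.getOrElse(tn, "0") // "0"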
+ 137 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/CompanyChanceHandle.scala

@@ -0,0 +1,137 @@
+package com.winhc.bigdata.spark.ng.chance
+
+import com.alibaba.fastjson.JSON
+import com.winhc.bigdata.spark.implicits.CaseClass2JsonHelper
+import com.winhc.bigdata.spark.utils.BaseUtil
+import org.apache.commons.lang3.StringUtils
+import org.apache.spark.internal.Logging
+
+import scala.annotation.meta.{getter, setter}
+import scala.collection.mutable
+import scala.collection.mutable.ListBuffer
+
+/**
+ * @Author: π
+ * @Date: 2021/9/28
+ * @Description: Case-source opportunity
+ */
+trait CompanyChanceHandle extends Logging {
+
+  @getter
+  @setter
+  protected val is_inc: Boolean //false = full (stock) run
+
+  /**
+   * Change time
+   *
+   * @param new_map
+   * @return
+   */
+  protected def get_change_time(bizDate: String, new_map: Map[String, String]): String = {
+    var res = bizDate
+    if (bizDate.length == 10) {
+      res = res.concat(" 00:00:00")
+    }
+    res
+  }
+
+  /**
+   * Business id
+   *
+   * @param rowkey
+   * @return
+   */
+  protected def get_biz_id(rowkey: String, new_map: Map[String, String]): String = rowkey
+
+  /**
+   * Conditional filter
+   *
+   * @return
+   */
+  def get_conditional_filter(): String = ""
+
+  /**
+   * Attach labels
+   *
+   * @return
+   */
+  def get_label(new_map: Map[String, String]): String = ""
+
+  /**
+   * Plaintiff / defendant pair
+   *
+   * @return
+   */
+  def get_party_set(new_map: Map[String, String]): Seq[String] = Seq("", "")
+
+  protected def get_deleted(new_map: Map[String, String], key: String = "deleted"): String = {
+    if (new_map == null) return "-1"
+    val deleted = new_map.getOrElse(key, "-1")
+    deleted
+  }
+
+  def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = CompanyChanceUtils.default_filter
+
+  def flat_map: (ChangeExtract) => Seq[CompanyChanceRecord]
+
+  def getCompanyIds(new_data: Map[String, String]): Seq[String]
+
+  protected def getEntity(json: String, id_key: String): Seq[String] = {
+    if (StringUtils.isEmpty(json)) {
+      Seq.empty
+    } else {
+      val array = JSON.parseArray(json)
+      var list: mutable.Seq[String] = mutable.Seq.empty
+      for (i <- 0 until array.size()) {
+        val jSONObject = array.getJSONObject(i)
+        val keyno = jSONObject.getString(id_key)
+        list = list :+ keyno
+      }
+      list
+    }
+  }
+
+  protected def getCompanyChanceRecord(change_extract: ChangeExtract
+                                       , companyIds: Seq[String]
+                                      ): Seq[CompanyChanceRecord] = {
+
+    val tn = change_extract.tn
+    val update_type = change_extract.update_type
+    val old_data = change_extract.old_data
+    var new_data = change_extract.new_data
+    val biz_date = change_extract.biz_date
+    val rowkey = change_extract.rowkey
+    val update_time = change_extract.update_time
+    val ids = companyIds.filter(_.length == 32).distinct
+    val list = ListBuffer[CompanyChanceRecord]()
+    if (ids.nonEmpty) {
+      ids.foreach(id => {
+        if (new_data.contains("case_amt")) {
+          new_data += ("case_amt" -> BaseUtil.amt_div(BaseUtil.getOrEmptyStr(new_data, "case_amt"), "1").toString)
+        }
+        if (new_data.contains("judge_date")) {
+          new_data += ("judge_date" -> BaseUtil.getOrEmptyStr(new_data, "judge_date").split(" ")(0))
+        }
+        import com.winhc.bigdata.spark.implicits.CaseClass2JsonHelper._
+
+        val record: CompanyChanceRecord = CompanyChanceRecord(
+          id = CompanyChanceUtils.generateId(rowkey, biz_date, tn)
+          , rowkey = rowkey
+          , company_id = id
+          , title = ""
+          , label = get_label(new_data)
+          , change_time = biz_date
+          , update_time = update_time
+          , update_type = update_type
+          , deleted = get_deleted(new_data)
+          , new_data = new_data.toJson
+          , plaintiff = get_party_set(new_data).head
+          , defendant = get_party_set(new_data).last
+        )
+        list.append(record)
+      })
+    }
+    list
+  }
+
+}

+ 106 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/CompanyChanceRecord.scala

@@ -0,0 +1,106 @@
+package com.winhc.bigdata.spark.ng.chance
+
+import cn.hutool.crypto.SecureUtil
+import org.apache.commons.lang3.StringUtils
+import org.apache.spark.sql.Row
+
+/**
+ * @author: π
+ * @date: 2021/9/28 13:49
+ */
+case class ChangeExtract(rowkey: String
+                         , company_id: String
+                         , company_name: String
+                         , tn: String
+                         , update_type: String
+                         , old_data: Map[String, String]
+                         , new_data: Map[String, String]
+                         , change_fields: Seq[String]
+                         , biz_date: String
+                         , update_time: String
+                        )
+
+
+case class EntityInfo(keyno: String
+                      , name: String
+                     ) extends Comparable[EntityInfo] {
+
+
+  override def hashCode(): Int = s"$name".hashCode()
+
+  override def equals(obj: Any): Boolean =
+    obj match {
+      case d: EntityInfo =>
+        s"$name".equals(s"${d.name}")
+      case _ =>
+        false
+    }
+
+  override def compareTo(o: EntityInfo): Int = s"$name".compareTo(s"${o.name}")
+}
+
+case class RowkeyInfo(rowkey: String, tn: String) {
+  def toStr(): String = {
+    s"$tn@@$rowkey"
+  }
+}
+
+
+case class CompanyChanceRecord(id: String,
+                               rowkey: String,
+                               company_id: String,
+                               title: String,
+                               label: String,
+                               change_time: String,
+                               update_time: String,
+                               update_type: String,
+                               deleted: String,
+                               new_data: String,
+                               plaintiff: String,
+                               defendant: String
+                              ) {
+
+  def format(): CompanyChanceRecord = {
+    if (id == null) {
+      return null
+    }
+    //drop records whose company_id is blank
+    if (StringUtils.isBlank(company_id)) {
+      return null
+    }
+    return CompanyChanceRecord(
+      id,
+      rowkey,
+      company_id,
+      title,
+      label,
+      change_time,
+      update_time,
+      update_type,
+      deleted,
+      new_data,
+      plaintiff,
+      defendant
+    )
+  }
+
+  def to_row(): Row = {
+    Row(SecureUtil.md5(id)
+      , rowkey
+      , company_id
+      , title
+      , label
+      , change_time
+      , update_time
+      , update_type
+      , deleted
+      , new_data
+      , plaintiff
+      , defendant
+    )
+  }
+
+}
+
+

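A REPL-style illustration (made-up values, not part of the commit) of the record's behaviour as coded above: format() drops records with a null id or blank company_id by returning null, and to_row() emits the md5 of id as the first column:

    val rec = CompanyChanceRecord(
      id = "rk001@2021-09-29 00:00:00@company_bid", rowkey = "rk001",
      company_id = "0123456789abcdef0123456789abcdef", title = "", label = "{}",
      change_time = "2021-09-29 00:00:00", update_time = "2021-09-29 10:00:00",
      update_type = "insert", deleted = "0", new_data = "{}",
      plaintiff = "", defendant = "")

    rec.format()              // an equal record; null if company_id were blank
    rec.to_row().getString(0) // md5 of the id (hutool SecureUtil.md5)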
+ 33 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/CompanyChanceUtils.scala

@@ -0,0 +1,33 @@
+package com.winhc.bigdata.spark.ng.chance
+
+import org.apache.commons.lang3.StringUtils
+
+/**
+ * @author: π
+ * @date: 2021/9/28 15:46
+ */
+object CompanyChanceUtils {
+
+  def default_filter(update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]): Boolean = {
+    if (biz_date == null) return false
+    //if (update_type.equals("remove") || update_type.equals("other")) return false
+    if (update_type.equals("update") && change_fields.isEmpty) return false
+    true
+  }
+
+  def generateId(rowkey: String, biz_date: String, tn: String, random_num: String = null): String = {
+    if (StringUtils.isEmpty(random_num)) {
+      s"$rowkey@$biz_date@$tn"
+    } else {
+      s"$rowkey@$biz_date@$tn@$random_num"
+    }
+  }
+
+  def formatDate(date: String): String = {
+    if (StringUtils.isEmpty(date)) {
+      null
+    } else {
+      if (date.contains(" ")) date.split(" ")(0) else date
+    }
+  }
+}

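Expected behaviour of these helpers, shown REPL-style (values made up, not part of the commit):

    import com.winhc.bigdata.spark.ng.chance.CompanyChanceUtils

    CompanyChanceUtils.generateId("rk001", "2021-09-29", "company_bid")
    // "rk001@2021-09-29@company_bid"
    CompanyChanceUtils.generateId("rk001", "2021-09-29", "company_bid", "7")
    // "rk001@2021-09-29@company_bid@7"
    CompanyChanceUtils.formatDate("2021-09-29 12:00:00")
    // "2021-09-29"
    CompanyChanceUtils.default_filter("update", "2021-09-29", Seq.empty, null, null)
    // false -- an update with no changed fields is dropped
    CompanyChanceUtils.default_filter("insert", "2021-09-29", Seq.empty, null, null)
    // true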
+ 473 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/CompanyCourtAnnouncementV2.scala

@@ -0,0 +1,473 @@
+package com.winhc.bigdata.spark.jobs
+
+import java.util
+import java.util.Collections
+
+import com.alibaba.fastjson.JSON
+import com.winhc.bigdata.spark.const.EnvConst
+import com.winhc.bigdata.spark.udf.{BaseFunc, CompanyMapping, JsonSerializable}
+import com.winhc.bigdata.spark.utils.BaseUtil._
+import com.winhc.bigdata.spark.utils.{BaseUtil, LoggingUtils, SparkUtils}
+import org.apache.commons.lang3.StringUtils
+import org.apache.http.HttpHost
+import org.apache.http.auth.{AuthScope, UsernamePasswordCredentials}
+import org.apache.http.entity.ContentType
+import org.apache.http.impl.client.BasicCredentialsProvider
+import org.apache.http.impl.nio.client.HttpAsyncClientBuilder
+import org.apache.http.nio.entity.NStringEntity
+import org.apache.http.util.EntityUtils
+import org.apache.spark.internal.Logging
+import org.apache.spark.sql.functions.col
+import org.apache.spark.sql.{Row, SparkSession}
+import org.elasticsearch.client.{RestClient, RestClientBuilder}
+import org.json4s.jackson.Json
+
+import scala.annotation.meta.getter
+import scala.collection.mutable
+
+/**
+ * @Description: Court announcement
+ * @author π
+ * @date 2020/7/10 14:33
+ */
+
+case class CourtAnnouncementV2(
+                              plaintiff_name: String,
+                              litigant_name: String,
+                              case_no: String,
+                              publish_date: String
+                            ) extends JsonSerializable
+
+object CourtAnnouncementV2 {
+  def apply(r: Row, cols: Seq[String]) = {
+    val res: Map[String, String] = cols.map(c => {
+      (c, r.getAs[String](c))
+    }).toMap
+    res
+  }
+
+}
+
+case class CompanyCourtAnnouncementV2(s: SparkSession, project: String, //project the table lives in
+                                    tableName: String //table name (without prefix/suffix)
+                                    //detailCols: Seq[String] // detail columns
+                                   ) extends LoggingUtils with CompanyMapping with Logging with BaseFunc {
+
+  @(transient@getter) val spark: SparkSession = s
+
+  def calc(runOld: Boolean = false) = {
+    import spark.implicits._
+
+    val inc_ads_company_tb_list = s"${project}.inc_ads_${tableName}_list_v1" //incremental ads list table
+
+    var adsListDs = getPartion(inc_ads_company_tb_list, spark)
+    //for a full (stock) run, use the first partition
+    if (runOld) {
+      adsListDs = getFirstPartion(inc_ads_company_tb_list, spark)
+    }
+    val ads_eci_debtor_relation = s"winhc_eci.ads_eci_debtor_relation_v2" //full creditor-debtor relation table
+    val debtorRelationDs = getPartion(ads_eci_debtor_relation, spark)
+
+    //result tables written to the production tables
+    val ads_address = s"winhc_eci.inc_ads_${tableName}_address" //incremental address table
+    val ads_yg_bg = s"winhc_eci.inc_ads_${tableName}_bg_yg" //incremental plaintiff/defendant table
+
+    //defendant side
+    val df = sql(
+      s"""
+         |SELECT  *
+         |FROM    (
+         |            SELECT  *
+         |                    ,ROW_NUMBER() OVER (PARTITION BY litigant_name ORDER BY publish_date DESC ) num
+         |                    ,md5(CLEANUP(concat_ws('',plaintiff,litigant,announcement_type,publish_date,case_no,litigant_name))) AS rowkey_business
+         |            FROM    $inc_ads_company_tb_list a
+         |            WHERE   ds = $adsListDs
+         |            AND     LENGTH(litigant_name) > 4
+         |            AND     announcement_type = '起诉状副本及开庭传票'
+         |            AND     publish_date >= '${atMonthsBefore(1)}'
+         |        ) b
+         |WHERE   num = 1
+         |""".stripMargin)
+
+    val all_cols: Seq[String] = spark.table(inc_ads_company_tb_list).schema.map(_.name).filter(s => {
+      !s.equals("ds")
+    })
+    val cols_md5 = all_cols ++ Seq("rowkey_business")
+
+    df.select(cols_md5.map(column => col(column).cast("string")): _*).mapPartitions(iter => {
+      trans(iter)
+      //restClient.close()
+    }).filter(_ != null)
+      .toDF("rowkey", "title", "plaintiff", "litigant", "company_name", "company_id", "label", "business_id",
+        "business_type", "business_type_name", "dynamic_content", "publish_date", "create_time", "province_code", "city_code", "county_code")
+      .createOrReplaceTempView("t1")
+
+    //case-chance table
+    sql(
+      s"""
+         |insert ${if (isWindows) "INTO" else "OVERWRITE"} table $ads_yg_bg  partition (ds=$adsListDs)
+         |select
+         |'0' as flag,
+         |rowkey,title,plaintiff,litigant,company_name,company_id,label,business_id,business_type,business_type_name,dynamic_content,
+         |publish_date,create_time
+         |from t1
+         |""".stripMargin)
+
+    //debtor element table
+    sql(
+      s"""
+         |insert ${if (isWindows) "INTO" else "OVERWRITE"} table $ads_address  partition (ds=$adsListDs)
+         |select
+         |md5(concat_ws('',rowkey,1,business_type,province_code,city_code)) as id,
+         |'0' as flag,
+         |rowkey,1 as address_type,province_code,city_code,county_code,business_type,publish_date,create_time
+         |from t1
+         |where trim(province_code) <> ''
+         |""".stripMargin)
+
+    //plaintiff side
+    sql(
+      s"""
+         |SELECT  *
+         |FROM    (
+         |            SELECT  *
+         |                    ,ROW_NUMBER() OVER (PARTITION BY plaintiff_name ORDER BY publish_date DESC) num
+         |                    ,md5(CLEANUP(concat_ws('',plaintiff,litigant,announcement_type,publish_date,case_no,plaintiff_name))) AS rowkey_business
+         |            FROM    $inc_ads_company_tb_list
+         |            WHERE   ds = $adsListDs
+         |            AND     announcement_type = '起诉状副本及开庭传票'
+         |            AND     LENGTH(plaintiff_name) > 4
+         |            AND     plaintiff_name not like '%银行%'
+         |            AND     plaintiff_name not like '%保险%'
+         |        ) x
+         |WHERE   num = 1 AND publish_date >= '${atMonthsBefore(3)}'
+         |""".stripMargin).cache().createOrReplaceTempView("announcement")
+
+    sql(
+      s"""
+         |SELECT  d.*,bg_cid,bg_city_name
+         |FROM    announcement d
+         |JOIN    (
+         |            SELECT  bg_name,bg_cid,bg_city_name
+         |            FROM    $ads_eci_debtor_relation
+         |            WHERE   ds = $debtorRelationDs
+         |            AND     deleted = 0
+         |            group by bg_name,bg_cid,bg_city_name
+         |        ) e
+         |ON      cleanup(d.plaintiff_name) = cleanup(e.bg_name)
+         |""".stripMargin).map(r => {
+      trans2(r)
+    })
+      //.filter(_ != null)
+      .toDF("rowkey", "title", "plaintiff", "litigant", "company_name", "company_id", "label", "business_id",
+        "business_type", "business_type_name", "dynamic_content", "publish_date", "create_time")
+      .createOrReplaceTempView("t2")
+
+    sql(
+      s"""
+         |insert into table $ads_yg_bg  partition (ds=$adsListDs)
+         |select
+         |'1' as flag,
+         |rowkey,title,plaintiff,litigant,company_name,company_id,label,business_id,business_type,business_type_name,dynamic_content,
+         |publish_date,create_time
+         |from t2
+         |""".stripMargin)
+
+
+    //address table
+    sql(
+      s"""
+         |SELECT  d.*
+         |        ,bg_name
+         |        ,bg_cid
+         |        ,bg_reg_status
+         |        ,bg_province_code
+         |        ,bg_city_code
+         |        ,bg_county_code
+         |        ,bg_reg_location
+         |        ,bg_estiblish_time
+         |        ,bg_category_code
+         |        ,bg_reg_capital
+         |        ,bg_phones
+         |        ,bg_emails
+         |        ,yg_name
+         |        ,yg_cid
+         |        ,yg_reg_status
+         |        ,yg_province_code
+         |        ,yg_city_code
+         |        ,yg_county_code
+         |        ,yg_reg_location
+         |        ,yg_estiblish_time
+         |        ,yg_category_code
+         |        ,yg_reg_capital
+         |        ,yg_phones
+         |        ,yg_emails
+         |        ,rowkey_business
+         |FROM    announcement d
+         |JOIN    (
+         |            SELECT  *
+         |            FROM    $ads_eci_debtor_relation
+         |            WHERE   ds = $debtorRelationDs
+         |            AND     deleted = 0
+         |        ) e
+         |ON      cleanup(d.plaintiff_name) = cleanup(e.bg_name)
+         |""".stripMargin).createOrReplaceTempView("t3")
+
+    sql(
+      s"""
+         |insert into table $ads_address  partition (ds=$adsListDs)
+         |SELECT
+         |        md5(concat_ws('',rowkey_business,address_type,business_type,province_code,city_code)) as id
+         |        ,'1' as flag
+         |        ,rowkey_business
+         |        ,address_type
+         |        ,province_code
+         |        ,city_code
+         |        ,county_code
+         |        ,business_type
+         |        ,publish_date
+         |        ,create_time
+         |FROM    (
+         |            SELECT  *
+         |                    ,ROW_NUMBER() OVER (PARTITION BY rowkey_business,province_code,city_code,address_type ORDER BY publish_date DESC) num
+         |            FROM    (
+         |                        SELECT  rowkey_business
+         |                                ,1 AS address_type
+         |                                ,bg_province_code AS province_code
+         |                                ,bg_city_code AS city_code
+         |                                ,bg_county_code AS county_code
+         |                                ,"7" AS business_type
+         |                                ,publish_date
+         |                                ,substr(from_unixtime(unix_timestamp()),1,10) AS create_time
+         |                        FROM    t3
+         |                        UNION ALL
+         |                        SELECT  rowkey_business
+         |                                ,0 AS address_type
+         |                                ,yg_province_code AS province_code
+         |                                ,yg_city_code AS city_code
+         |                                ,yg_county_code AS county_code
+         |                                ,"7" AS business_type
+         |                                ,publish_date
+         |                                ,substr(from_unixtime(unix_timestamp()),1,10) AS create_time
+         |                        FROM    t3
+         |                    ) a
+         |        ) b
+         |WHERE num = 1 AND trim(province_code) <> ''
+         |""".stripMargin)
+  }
+
+  def trans(iter: Iterator[Row]) = {
+    val restClient = EsQueryV2.getClient("es.eci.nodes")
+    val df = iter.map(r => {
+      try {
+        import org.json4s.DefaultFormats
+        val rowkey_business = r.getAs[String]("rowkey_business") //case-chance primary key
+        val title = "" //title
+        val plaintiff = r.getAs[String]("plaintiff") //plaintiff
+        val litigant = r.getAs[String]("litigant") //litigants
+        val litigant_name = r.getAs[String]("litigant_name") //defendant company
+        val business_id = r.getAs[String]("rowkey") //business primary key id
+        val business_type = "8" //dynamic type
+        val business_type_name = "0" //dynamic type name
+        val m1: Map[String, String] = EsQueryV2.queryCompany(restClient, litigant_name)
+        //label list
+        val label: String = Json(DefaultFormats).write(
+         CourtAnnouncementV2(r, Seq("announcement_type", "publish_date")) ++ Map("city_name" -> m1.getOrElse("city_name", ""))
+        )
+        //dynamic change content
+        val m2: Map[String, String] =CourtAnnouncementV2(r, Seq("plaintiff",
+          "litigant", "announcement_type", "court_name", "publish_date", "content"))
+        val dynamic_content = Json(DefaultFormats).write(m1 ++: m2)
+        val publish_date = r.getAs[String]("publish_date") //dynamic change time
+        val create_time = atMonthsBefore(0, "yyyy-MM-dd HH:mm:ss") //creation time
+        val province_code = m1.getOrElse("province_code", "") //province code
+        val city_code = m1.getOrElse("city_code", "") //city code
+        val county_code = m1.getOrElse("county_code", "") //county code
+        val litigant_cid = m1.getOrElse("id", "") //company cid
+        (rowkey_business, title, plaintiff, litigant, litigant_name, litigant_cid, label, business_id, business_type,
+          business_type_name, dynamic_content, publish_date, create_time, province_code, city_code, county_code)
+      } catch {
+        case e: Exception => {
+          logWarning(r.toString())
+          logError(e.getMessage, e)
+          null
+        }
+      }
+    })
+    df
+  }
+
+  def trans2(r: Row) = {
+    import org.json4s.DefaultFormats
+    val rowkey_business = r.getAs[String]("rowkey_business") //case-chance primary key
+    val title = "" //title
+    val plaintiff = r.getAs[String]("plaintiff") //plaintiff
+    val litigant = r.getAs[String]("litigant") //litigants
+    val plaintiff_name = r.getAs[String]("plaintiff_name") //plaintiff company
+    val plaintiff_cid = r.getAs[String]("bg_cid") //plaintiff company cid
+    val city_name = r.getAs[String]("bg_city_name") //plaintiff company city
+    val label: String = Json(DefaultFormats).write(
+     CourtAnnouncementV2(r, Seq("announcement_type", "publish_date")) ++ Map("city_name" -> city_name)
+    ) //label list
+    val business_id = r.getAs[String]("rowkey") //business primary key id
+    val business_type = "7" //dynamic type
+    val business_type_name = "0" //dynamic type name
+    //dynamic change content
+    val m2: Map[String, String] =CourtAnnouncementV2(r, Seq("plaintiff",
+      "litigant", "announcement_type", "court_name", "publish_date", "content"))
+    val dynamic_content = Json(DefaultFormats).write(m2)
+    val publish_date = r.getAs[String]("publish_date") //dynamic change time
+    val create_time = atMonthsBefore(0, "yyyy-MM-dd HH:mm:ss") //creation time
+    (rowkey_business, title, plaintiff, litigant, plaintiff_name, plaintiff_cid, label, business_id, business_type,
+      business_type_name, dynamic_content, publish_date, create_time)
+  }
+
+  def regfun() = {
+    prepareFunctions(spark)
+    company_split()
+  }
+
+}
+
+object CompanyCourtAnnouncementV2 {
+  def main(args: Array[String]): Unit = {
+    var project = ""
+    var table = ""
+    var runOld = false
+
+    if (args.length == 2) {
+      val Array(project1, table1) = args
+      project = project1
+      table = table1
+    } else if (args.length == 3) {
+      val Array(project1, table1, remain) = args
+      project = project1
+      table = table1
+      if (remain.equals("1"))
+        runOld = true
+    } else {
+      println("please set project,table...")
+      sys.exit(-1)
+    }
+
+    println(
+      s"""
+         |project: $project| table: $table| runOld: $runOld
+         |""".stripMargin)
+
+    val config = mutable.Map(
+      "spark.hadoop.odps.project.name" -> "winhc_eci_dev",
+      "spark.hadoop.odps.spark.local.partition.amt" -> "100"
+    )
+    val spark: SparkSession = SparkUtils.InitEnv(this.getClass.getSimpleName, config)
+    val announcement = CompanyCourtAnnouncementV2(spark, project, table)
+    announcement.regfun()
+    announcement.calc()
+    spark.stop()
+  }
+
+}
+
+object EsQueryV2 {
+
+  def main(args: Array[String]): Unit = {
+    val restClient = getClient("es.eci.nodes")
+    val id = queryCompany(restClient, "华为技术有限公司")
+    println(id)
+    restClient.close()
+  }
+
+  def getClient(nodes: String): RestClient = {
+    val credentialsProvider = new BasicCredentialsProvider();
+    credentialsProvider.setCredentials(AuthScope.ANY,
+      new UsernamePasswordCredentials("elastic", "elastic_168"))
+    val restClient = RestClient.builder(new HttpHost(EnvConst.getEnv().getValue(nodes), 9200))
+      .setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() {
+        override def customizeHttpClient(httpClientBuilder: HttpAsyncClientBuilder): HttpAsyncClientBuilder = httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider)
+      }).build()
+    restClient
+  }
+
+  def queryCompany(restClient: RestClient, companyName: String) = {
+
+    val query =
+      s"""
+         |{
+         |  "_source": {
+         |    "includes": [
+         |      "_id",
+         |      "province_code",
+         |      "city_code",
+         |      "county_code",
+         |      "reg_capital",
+         |      "estiblish_time",
+         |      "phones",
+         |      "category_first_code",
+         |      "category_second_code",
+         |      "category_third_code"
+         |    ]
+         |  },
+         |  "query": {
+         |    "term": {
+         |      "cname.value.keyword": "${BaseUtil.cleanup(companyName)}"
+         |    }
+         |  },
+         |  "sort": [
+         |    {
+         |      "company_type": {
+         |        "order": "asc"
+         |      }
+         |    }
+         |  ]
+         |}
+         |""".stripMargin
+    val entity = new NStringEntity(query, ContentType.APPLICATION_JSON)
+
+    val indexResponse = restClient.performRequest(
+      "GET",
+      "/winhc-company-v8/company/_search",
+      Collections.emptyMap[String, String](),
+      entity)
+    val en = indexResponse.getEntity
+    val res = EntityUtils.toString(en)
+    import scala.collection.JavaConverters._
+    val list = getIndexResult2(res)
+    if (list.nonEmpty) {
+      val id = list.head("_id").asInstanceOf[String]
+      val source: util.Map[String, Any] = list.head("_source").asInstanceOf[util.Map[String, Any]]
+      val province_code = source.get("province_code").asInstanceOf[String]
+      val city_code = source.get("city_code").asInstanceOf[String]
+      val county_code = source.get("county_code").asInstanceOf[String]
+      val reg_capital = source.get("reg_capital").asInstanceOf[String]
+      val estiblish_time = source.get("estiblish_time").asInstanceOf[String]
+      val phones = source.get("phones").asInstanceOf[util.List[String]].asScala.mkString(",")
+
+      // note: the _source filter above only requests the *_code variants
+      val category_first = source.get("category_first_code").asInstanceOf[String]
+      val category_second = source.get("category_second_code").asInstanceOf[String]
+      val category_third = source.get("category_third_code").asInstanceOf[String]
+
+      Map(
+        "id" -> id,
+        "province_code" -> province_code,
+        "city_code" -> city_code,
+        "county_code" -> county_code,
+        "reg_capital" -> reg_capital,
+        "estiblish_time" -> estiblish_time,
+        "phones" -> phones,
+        "category_first" -> category_first,
+        "category_second" -> category_second,
+        "category_third" -> category_third
+      )
+    } else {
+      Map.empty[String, String]
+    }
+  }
+
+  def getIndexResult2(json: String) = {
+    import scala.collection.JavaConverters._
+    JSON.parseObject(json).getJSONObject("hits").getJSONArray("hits").toArray().map(m => m.asInstanceOf[util.Map[String, Any]]).map(_.asScala).toList
+  }
+}
+
+
+

+ 363 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/GoodNewsV2.scala

@@ -0,0 +1,363 @@
+package com.winhc.bigdata.spark.ng.chance
+
+import com.winhc.bigdata.spark.config.EsConfig
+import com.winhc.bigdata.spark.utils.ReflectUtils.getClazz
+import com.winhc.bigdata.spark.utils.{AsyncExtract, LoggingUtils, SparkUtils}
+import org.apache.commons.lang3.StringUtils
+import org.apache.spark.internal.Logging
+import org.apache.spark.sql.SparkSession
+import org.apache.spark.sql.types.StringType
+import com.winhc.bigdata.spark.udf.{BaseFunc, CaseChanceFunc}
+import com.winhc.bigdata.spark.utils.BaseUtil.isWindows
+
+import scala.annotation.meta.getter
+import scala.collection.immutable.ListMap
+import scala.collection.mutable
+
+/**
+ * @Author: π
+ * @Date: 2021/9/9
+ * @Description: Favorable news
+ */
+object GoodNewsV2 {
+
+  case class CompanyMonitorUtil(s: SparkSession,
+                                project: String, //project the tables live in
+                                incr: Boolean //whether to run incrementally
+                               ) extends LoggingUtils with Logging with BaseFunc with CaseChanceFunc {
+    @(transient@getter) val spark: SparkSession = s
+
+    val inc_ads_eci_debtor_relation = "winhc_ng.inc_ads_eci_debtor_relation"
+    val target_ads_case_chance = "winhc_ng.ads_case_chance_good_news"
+    val target_ads_case_chance_element = "winhc_ng.ads_case_chance_element_good_news"
+
+    //private lazy val org_tab = if (incr) "winhc_ng.bds_change_extract" else "winhc_ng.bds_change_extract_all_v2"
+    private lazy val org_tab = "winhc_ng.bds_change_extract"
+    private var ds = getLastPartitionsOrElse(org_tab, "0")
+    if (!incr) {
+      ds = "0"
+    }
+
+    val relation_cols: Seq[String] = getColumns(inc_ads_eci_debtor_relation).filter(!_.equals("ds"))
+    val eci_debtor_rel_ds: String = getLastPartitionsOrElse(inc_ads_eci_debtor_relation, "0")
+
+
+    //map handler names to their underlying table
+    val tabMapping =
+      Map("company_court_announcement_v1" -> "company_court_announcement" //court announcement - defendant
+        , "wenshu_detail_v2_bg_yishen" -> "wenshu_detail_v2" //judgment document
+        , "wenshu_detail_v2_yg_yishen" -> "wenshu_detail_v2" //judgment document
+        , "wenshu_detail_v2_yg_zhongben" -> "wenshu_detail_v2" //judgment document
+      )
+
+    //resolve a handler name to its physical table name
+    def trans(s: String): String = {
+      if (tabMapping.contains(s)) return tabMapping(s)
+      s
+    }
+
+    def calc(tableName1: String, mode: String): Unit = {
+      cleanup()
+      json_utils()
+      json_add_kv()
+      map_2_json()
+      trans_number()
+      chance_dynamic_type()
+
+      val tableName2 = trans(tableName1)
+
+      val clazz = getClazz[CompanyChanceHandle](s"com.winhc.bigdata.spark.ng.chance.table.$tableName1", incr)
+      val conditional = clazz.get_conditional_filter()
+      val filter = clazz.filter
+      val flat_map = clazz.flat_map
+      val tn = tableName2
+
+      //-- WHERE   ds = '$ds'
+      val rdd = sql(
+        s"""
+           |SELECT  *
+           |FROM    $org_tab
+           |WHERE   ds >= '$ds'
+           |AND     tn = '$tn'
+           |$conditional
+           |""".stripMargin)
+        .rdd.map(r => {
+        val value = r.getAs[String]("change_fields")
+        val change_fields: Seq[String] = if (StringUtils.isEmpty(value)) Seq.empty else value.split(",")
+        ChangeExtract(rowkey = r.getAs("rowkey")
+          , company_id = r.getAs("company_id")
+          , company_name = null
+          , tn = r.getAs("table_name")
+          , update_type = r.getAs("update_type")
+          , old_data = r.getAs("old_data")
+          , new_data = r.getAs("new_data")
+          , change_fields = change_fields
+          , biz_date = r.getAs("biz_date")
+          , update_time = r.getAs("update_time")
+        )
+      }).filter(r => {
+        if (filter == null) {
+          true
+        } else {
+          filter(r.update_type, r.biz_date, r.change_fields, r.old_data, r.new_data)
+        }
+      }).flatMap(flat_map)
+        .map(_.format())
+        .filter(_ != null)
+
+      val schema = getSchema(ListMap(
+        "id" -> StringType
+        , "rowkey" -> StringType
+        , "company_id" -> StringType
+        , "title" -> StringType
+        , "label" -> StringType
+        , "change_time" -> StringType
+        , "update_time" -> StringType
+        , "update_type" -> StringType
+        , "deleted" -> StringType
+        , "new_data" -> StringType
+        , "plaintiff" -> StringType
+        , "defendant" -> StringType
+      ))
+
+      spark.createDataFrame(rdd.map(_.to_row()), schema)
+        .createOrReplaceTempView(s"good_news_debtor_relation_view$tableName1")
+
+      mode match {
+        case "0" => {
+          sql(
+            s"""
+               |SELECT  *
+               |FROM    (
+               |            SELECT  ${relation_cols.map(n => s"$n as $n").mkString(",")}
+               |            FROM    $inc_ads_eci_debtor_relation
+               |            WHERE   ds = '$eci_debtor_rel_ds'
+               |            and deleted = 0
+               |        ) AS t1
+               |JOIN (
+               |         SELECT
+               |           *
+               |         FROM good_news_debtor_relation_view$tableName1
+               |      ) AS t2
+               |ON      t1.bg_company_id = t2.company_id
+               |WHERE t1.bg_name not like concat('%','银行','%') AND t1.bg_name not like concat('%','保险','%')
+               |""".stripMargin).createOrReplaceTempView(s"good_news_debtor_relation_view${tableName1}v2")
+
+
+          sql(
+            s"""
+               |INSERT  INTO TABLE $target_ads_case_chance_element PARTITION(ds='$ds', tn='$tableName1')
+               |SELECT  md5(cleanup(CONCAT_WS('',case_chance_id,case_chance_type,type,province,city,dynamic_time))) AS id
+               |        ,CASE_CHANCE_ID
+               |        ,TYPE
+               |        ,province
+               |        ,city
+               |        ,county
+               |        ,dynamic_time
+               |        ,public_date
+               |        ,CASE_CHANCE_TYPE
+               |FROM    (
+               |            SELECT  *
+               |                    ,ROW_NUMBER() OVER(PARTITION BY CASE_CHANCE_ID,TYPE,PROVINCE,city ORDER BY CASE_CHANCE_ID) AS num
+               |            FROM    (
+               |                        SELECT   rowkey AS CASE_CHANCE_ID
+               |                                ,0 AS TYPE
+               |                                ,get_table_type('$tableName1') AS CASE_CHANCE_TYPE
+               |                                ,yg_province_code AS PROVINCE
+               |                                ,yg_city_code AS city
+               |                                ,yg_county_code AS county
+               |                                ,change_time AS dynamic_time
+               |                                ,update_time AS public_date
+               |                        FROM    good_news_debtor_relation_view${tableName1}v2
+               |                        UNION ALL
+               |                        SELECT   rowkey AS CASE_CHANCE_ID
+               |                                ,1 AS TYPE
+               |                                ,get_table_type('$tableName1') AS CASE_CHANCE_TYPE
+               |                                ,bg_province_code AS PROVINCE
+               |                                ,bg_city_code AS city
+               |                                ,bg_county_code AS county
+               |                                ,change_time AS dynamic_time
+               |                                ,update_time AS public_date
+               |                        FROM    good_news_debtor_relation_view${tableName1}v2
+               |                    )
+               |        ) AS t
+               |WHERE   t.num = 1
+               |""".stripMargin)
+
+          sql(
+            s"""
+               |
+               |INSERT INTO TABLE $target_ads_case_chance PARTITION(ds='$ds', tn='$tableName1')
+               |SELECT  rowkey AS case_chance_id
+               |        -- ,title AS title
+               |        ,bg_name as title
+               |        ,plaintiff
+               |        ,defendant
+               |        ,bg_name AS company_name
+               |        ,company_id
+               |        ,json_add_str(label,CONCAT_WS(',',get_json_kv('reg_capital',trans_number(bg_reg_capital)),get_json_kv('province_code',bg_province_code),get_json_kv('city_code',bg_city_code),get_json_kv('county_code',bg_county_code),get_json_kv('estiblish_time',bg_estiblish_time) ,get_json_kv('province_name',bg_province_name),get_json_kv('city_name',bg_city_name),get_json_kv('county_name',bg_county_name),get_json_kv('category_first',bg_category_first_name)  )) AS tags
+               |        ,rowkey AS biz_id
+               |        ,get_table_type('$tableName1') AS type
+               |        ,get_chance_dynamic_type('$tableName1') AS dynamic_type
+               |        ,new_data AS dynamic_content
+               |        ,change_time AS dynamic_time
+               |        ,update_time AS public_date
+               |FROM    (
+               |            SELECT  *
+               |                    ,ROW_NUMBER() OVER(PARTITION BY rowkey ORDER BY rowkey) AS num
+               |            FROM    good_news_debtor_relation_view${tableName1}v2
+               |        ) AS t
+               |WHERE   t.num = 1
+               |""".stripMargin)
+        }
+        case "1" => {
+
+          sql(
+            s"""
+               |SELECT  t2.*,
+               |         name,
+               |         province_code,
+               |         city_code,
+               |         county_code,
+               |         reg_capital,
+               |         estiblish_time,
+               |         phones,
+               |         cate_first_code,
+               |         cate_second_code,
+               |         cate_third_code
+               |FROM    (
+               |            SELECT  * FROM winhc_ng.tmp_xf_company_all
+               |        ) AS t1
+               |JOIN (
+               |         SELECT
+               |           *
+               |         FROM good_news_debtor_relation_view$tableName1
+               |      ) AS t2
+               |ON      t1.company_id = t2.company_id
+               |WHERE t1.name not like concat('%','银行','%') AND t1.name not like concat('%','保险','%')
+               |""".stripMargin).createOrReplaceTempView(s"good_news_debtor_relation_view${tableName1}v2")
+
+          //debtor element table
+          sql(
+            s"""
+               |INSERT INTO TABLE $target_ads_case_chance_element  PARTITION (ds=$ds, tn='$tableName1')
+               |SELECT   md5(cleanup(CONCAT_WS('',case_chance_id,case_chance_type,type,province_code,city_code,dynamic_time))) AS id
+               |        ,case_chance_id
+               |        ,type
+               |        ,province_code
+               |        ,city_code
+               |        ,county_code
+               |        ,dynamic_time
+               |        ,public_date
+               |        ,case_chance_type
+               |FROM
+               |(
+               |select
+               |         rowkey AS case_chance_id
+               |        ,0 AS type
+               |        ,province_code
+               |        ,city_code
+               |        ,county_code
+               |        ,change_time AS dynamic_time
+               |        ,update_time AS public_date
+               |        ,get_table_type('$tableName1') AS case_chance_type
+               |from good_news_debtor_relation_view${tableName1}v2
+               |)
+               |WHERE trim(province_code) <> ''
+               |""".stripMargin)
+
+          //case-chance table
+          sql(
+            s"""
+               |INSERT INTO TABLE $target_ads_case_chance  PARTITION (ds=$ds, tn='$tableName1')
+               |SELECT
+               |     rowkey as case_chance_id
+               |    ,title
+               |    ,plaintiff
+               |    ,defendant
+               |    ,name as company_name
+               |    ,company_id
+               |    ,label as tags
+               |    ,rowkey as biz_id
+               |    ,get_table_type('$tableName1') AS type
+               |    ,get_chance_dynamic_type('$tableName1') AS dynamic_type
+               |    ,new_data AS dynamic_content
+               |    ,change_time AS dynamic_time
+               |    ,update_time AS public_date
+               |FROM good_news_debtor_relation_view${tableName1}v2
+               |""".stripMargin)
+
+        }
+      }
+    }
+  }
+
+  private val startArgs = Seq(
+    Args(tableName = "company_equity_info")
+    , Args(tableName = "company_tm")
+    , Args(tableName = "company_patent")
+    , Args(tableName = "company_copyright_works")
+    , Args(tableName = "company_copyright_reg")
+    , Args(tableName = "company_land_publicity")
+    , Args(tableName = "company_land_announcement")
+    , Args(tableName = "company_bid")
+    , Args(tableName = "company_land_transfer")
+    , Args(tableName = "company_employment")
+    , Args(tableName = "company_certificate")
+    , Args(tableName = "company_zxr_restrict")
+    , Args(tableName = "company_zxr")
+    , Args(tableName = "company_dishonest_info")
+    , Args(tableName = "company_court_announcement")
+    , Args(tableName = "bankruptcy_open_case")
+    , Args(tableName = "wenshu_detail_v2_bg_yishen")
+    , Args(tableName = "company_court_announcement_v1", mode = "1")
+    , Args(tableName = "wenshu_detail_v2_yg_yishen", mode = "1")
+    , Args(tableName = "wenshu_detail_v2_yg_zhongben", mode = "1")
+  )
+
+  private case class Args(project: String = "winhc_ng"
+                          , tableName: String
+                          , mode: String = "0") //processing mode
+
+  def main(args: Array[String]): Unit = {
+
+    if (args.length != 3) {
+      println(
+        s"""
+           |Please supply valid arguments!
+           |<project> <tableNames> <incr>
+           |""".stripMargin)
+      sys.exit(-99)
+    }
+
+    val Array(project, tableNames, incr) = args
+
+    println(
+      s"""
+         |project: $project
+         |tableNames: $tableNames
+         |incr: $incr
+         |""".stripMargin)
+
+    val config = EsConfig.getEsConfigMap ++ mutable.Map(
+      "spark.hadoop.odps.project.name" -> project,
+      "spark.hadoop.odps.spark.local.partition.amt" -> "1000"
+    )
+    val spark = SparkUtils.InitEnv("CompanyMonitorV2", config)
+    val cd = CompanyMonitorUtil(spark, project, "incr".equals(incr))
+
+    var start = startArgs
+    if (!tableNames.equals("all")) {
+      val set = tableNames.split(",").toSet
+      start = start.filter(a => set.contains(a.tableName))
+    }
+
+    val a = start.map(e => (e.tableName, () => {
+      cd.calc(e.tableName, e.mode) //generic processing
+      true
+    }))
+
+    AsyncExtract.startAndWait(spark, a)
+    spark.stop()
+  }
+}

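For reference, a hedged sketch of how this job is driven: main expects <project> <tableNames> <incr>, where tableNames is "all" or a comma-separated subset of startArgs, and incr must literally be "incr" for an incremental run (the concrete values below are examples only):

    import com.winhc.bigdata.spark.ng.chance.GoodNewsV2

    GoodNewsV2.main(Array("winhc_ng", "company_bid,company_tm", "incr")) // incremental, two handlers
    GoodNewsV2.main(Array("winhc_ng", "all", "full"))                    // full run over every handler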
+ 51 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/bankruptcy_open_case.scala

@@ -0,0 +1,51 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+import org.json4s.DefaultFormats
+import org.json4s.jackson.Json
+
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Bankruptcy announcement
+ */
+case class bankruptcy_open_case(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    ""
+  }
+
+  override def get_label(new_data: Map[String, String]): String = {
+    val m: Map[String, String] = Map(
+      "agency_court" -> new_data.getOrEmptyStr("agency_court"),
+      "case_type" -> new_data.getOrEmptyStr("case_type")
+    )
+    Json(DefaultFormats).write(m)
+  }
+
+  override def get_party_set(new_data: Map[String, String]): Seq[String] = {
+    Seq(
+      new_data.getOrEmptyStr("applicant"),
+      new_data.getOrEmptyStr("respondent")
+    )
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("insert")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    getEntity(new_data.getOrEmptyStr("respondent_info"), "litigant_id")
+  }
+
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}

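The handler above (and each per-table class that follows) is looked up by name in GoodNewsV2.calc via reflection; a sketch of that wiring, assuming getClazz instantiates the case class with its is_inc constructor argument as the calc code does:

    import com.winhc.bigdata.spark.ng.chance.CompanyChanceHandle
    import com.winhc.bigdata.spark.utils.ReflectUtils.getClazz

    val handler = getClazz[CompanyChanceHandle](
      "com.winhc.bigdata.spark.ng.chance.table.bankruptcy_open_case", true)
    handler.get_conditional_filter() // extra SQL predicate appended to the change-extract query ("" here)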
+ 32 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_bid.scala

@@ -0,0 +1,32 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Bid/tender
+ */
+case class company_bid(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    ""
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("insert")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    getEntity(new_data.getOrEmptyStr("purchaser_info"), "keyno")
+  }
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}

+ 33 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_certificate.scala

@@ -0,0 +1,33 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Qualification certificate
+ */
+case class company_certificate(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    ""
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("insert")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    Seq(new_data.getOrEmptyStr("company_id"))
+  }
+
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}

+ 32 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_copyright_reg.scala

@@ -0,0 +1,32 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Software copyright
+ */
+case class company_copyright_reg(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    ""
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("insert")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    getEntity(new_data.getOrEmptyStr("author_nationality_info"), "keyno")
+  }
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}

+ 33 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_copyright_works.scala

@@ -0,0 +1,33 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Works copyright
+ */
+case class company_copyright_works(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    ""
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("insert")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    getEntity(new_data.getOrEmptyStr("author_info"), "keyno")
+  }
+
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}

+ 54 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_court_announcement.scala

@@ -0,0 +1,54 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+import org.json4s.{DefaultFormats, Formats, NoTypeHints}
+import org.json4s.jackson.{Json, Serialization}
+import org.json4s.jackson.Serialization.write
+
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Court announcement - plaintiff
+ */
+case class company_court_announcement(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    "AND  new_data['announcement_type'] = '起诉状副本及开庭传票' "
+  }
+
+  override def get_label(new_data: Map[String, String]): String = {
+    val m: Map[String, String] = Map(
+      "announcement_type" -> new_data.getOrEmptyStr("announcement_type"),
+      "publish_date" -> new_data.getOrEmptyStr("publish_date")
+    )
+    Json(DefaultFormats).write(m)
+  }
+
+  override def get_party_set(new_data: Map[String, String]): Seq[String] = {
+    Seq(
+      new_data.getOrEmptyStr("plaintiff"),
+      new_data.getOrEmptyStr("litigant")
+    )
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("insert")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    //Seq(new_data.getOrEmptyStr("related_company_id")) ++
+    getEntity(new_data.getOrEmptyStr("plaintiff_info"), "litigant_id") //++
+    //getEntity(new_data.getOrEmptyStr("pledgee_info"), "pledgee_id")
+  }
+
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}
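
get_label here serializes a two-field map with json4s, and get_conditional_filter keeps only announcements whose type is '起诉状副本及开庭传票' (service of a copy of the complaint plus a court summons, i.e. a newly filed suit). A small self-contained sketch of the label string this produces, with illustrative field values:

import org.json4s.DefaultFormats
import org.json4s.jackson.Json

object CourtAnnouncementLabelSketch {
  def main(args: Array[String]): Unit = {
    // Same shape as get_label above; the values here are made up.
    val label = Json(DefaultFormats).write(Map(
      "announcement_type" -> "起诉状副本及开庭传票",
      "publish_date" -> "2021-09-29"
    ))
    // Prints something like:
    // {"announcement_type":"起诉状副本及开庭传票","publish_date":"2021-09-29"}
    println(label)
  }
}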

+ 51 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_court_announcement_v1.scala

@@ -0,0 +1,51 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+import org.json4s.DefaultFormats
+import org.json4s.jackson.Json
+
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Court announcement - defendant
+ */
+case class company_court_announcement_v1(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    "AND  new_data['announcement_type'] = '起诉状副本及开庭传票' "
+  }
+
+  override def get_label(new_data: Map[String, String]): String = {
+    val m: Map[String, String] = Map(
+      "announcement_type" -> new_data.getOrEmptyStr("announcement_type"),
+      "publish_date" -> new_data.getOrEmptyStr("publish_date")
+    )
+    Json(DefaultFormats).write(m)
+  }
+
+  override def get_party_set(new_data: Map[String, String]): Seq[String] = {
+    Seq(
+      new_data.getOrEmptyStr("plaintiff"),
+      new_data.getOrEmptyStr("litigant")
+    )
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("insert")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    getEntity(new_data.getOrEmptyStr("litigant_info"), "litigant_id")
+  }
+
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}

+ 34 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_court_open_announcement.scala

@@ -0,0 +1,34 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Court hearing announcement
+ */
+case class company_court_open_announcement(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    "AND  new_data['case_no'] like concat('%','恢','%')"
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("insert")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    //Seq(new_data.getOrEmptyStr("related_company_id")) ++
+    getEntity(new_data.getOrEmptyStr("defendant_info"), "litigant_id") //++
+    //getEntity(new_data.getOrEmptyStr("pledgee_info"), "pledgee_id")
+  }
+
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}
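
Both this handler and wenshu_detail_v2 below keep only cases whose case_no contains '恢', i.e. resumed-enforcement cases. The concat call builds the same pattern as writing the LIKE literal directly; an equivalent sketch, behaviour unchanged:

object ResumedEnforcementFilter {
  // Same effect as like concat('%','恢','%'): matches case numbers containing '恢'.
  val sql: String = "AND new_data['case_no'] like '%恢%'"
}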

+ 32 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_dishonest_info.scala

@@ -0,0 +1,32 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Dishonest debtor
+ */
+case class company_dishonest_info(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    ""
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("deleted")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    Seq(new_data.getOrEmptyStr("keyno"))
+  }
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}

+ 33 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_employment.scala

@@ -0,0 +1,33 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Recruitment
+ */
+case class company_employment(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    ""
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("insert")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    Seq(new_data.getOrEmptyStr("company_id"))
+  }
+
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}

+ 35 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_equity_info.scala

@@ -0,0 +1,35 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Equity pledge - pledgor
+ */
+case class company_equity_info(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    //"AND  new_data['case_no'] like concat('%','恢','%')"
+    ""
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("insert")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    //Seq(new_data.getOrEmptyStr("related_company_id")) ++
+    getEntity(new_data.getOrEmptyStr("pledgor_info"), "pledgor_id") //++
+    //getEntity(new_data.getOrEmptyStr("pledgee_info"), "pledgee_id")
+  }
+
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}

+ 32 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_holder.scala

@@ -0,0 +1,32 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Shareholder
+ */
+case class company_holder(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    ""
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("insert") || update_type.equals("create")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    Seq(new_data.getOrEmptyStr("company_id"))
+  }
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}

+ 33 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_land_announcement.scala

@@ -0,0 +1,33 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Land purchase information
+ */
+case class company_land_announcement(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    ""
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("insert")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    Seq(new_data.getOrEmptyStr("company_id"))
+  }
+
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}

+ 33 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_land_publicity.scala

@@ -0,0 +1,33 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Land parcel publicity
+ */
+case class company_land_publicity(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    ""
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("insert")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    Seq(new_data.getOrEmptyStr("company_id"))
+  }
+
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}

+ 32 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_land_transfer.scala

@@ -0,0 +1,32 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Land transfer
+ */
+case class company_land_transfer(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    ""
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("insert")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    Seq(new_data.getOrEmptyStr("pre_keyno"))
+  }
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}

+ 32 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_patent.scala

@@ -0,0 +1,32 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Patent
+ */
+case class company_patent(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    ""
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("insert")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    getEntity(new_data.getOrEmptyStr("applicant_name_info"), "keyno")
+  }
+
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}

+ 32 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_tm.scala

@@ -0,0 +1,32 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Trademark
+ */
+case class company_tm(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    ""
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("insert")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    Seq(new_data.getOrEmptyStr("keyno"))
+  }
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}

+ 32 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_zxr.scala

@@ -0,0 +1,32 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Person subject to enforcement
+ */
+case class company_zxr(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    ""
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("deleted")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    Seq(new_data.getOrEmptyStr("keyno"))
+  }
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}

+ 32 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/company_zxr_restrict.scala

@@ -0,0 +1,32 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+
+/**
+ * @Date: 2021/9/29
+ * @Description: High-consumption restriction
+ */
+case class company_zxr_restrict(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    ""
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("deleted")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    Seq(new_data.getOrEmptyStr("company_id"))
+  }
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}

+ 34 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/wenshu_detail_v2.scala

@@ -0,0 +1,34 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Judgment documents - resumed enforcement
+ */
+case class wenshu_detail_v2(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    "AND  new_data['case_no'] like concat('%','恢','%')"
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("insert")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    //Seq(new_data.getOrEmptyStr("related_company_id")) ++
+    getEntity(new_data.getOrEmptyStr("defendant_info"), "litigant_id") //++
+    //getEntity(new_data.getOrEmptyStr("pledgee_info"), "pledgee_id")
+  }
+
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}

+ 53 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/wenshu_detail_v2_bg_yishen.scala

@@ -0,0 +1,53 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+import com.winhc.bigdata.spark.utils.BaseUtil
+import org.json4s.DefaultFormats
+import org.json4s.jackson.Json
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Judgment documents - defendant - first instance
+ */
+case class wenshu_detail_v2_bg_yishen(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    " AND  new_data['is_success'] = '胜' AND new_data['case_stage'] = '一审'  AND new_data['case_type'] = '民事案件' "
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("insert")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+
+  override def get_label(new_data: Map[String, String]): String = {
+    BaseUtil.getTags(new_data, "case_amt", "judge_date", "4")
+  }
+
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    //Seq(new_data.getOrEmptyStr("related_company_id")) ++
+    getEntity(new_data.getOrEmptyStr("defendant_info"), "litigant_id") //++
+    //getEntity(new_data.getOrEmptyStr("pledgee_info"), "pledgee_id")
+  }
+
+  /**
+   * 原告-被告 集合
+   *
+   * @return
+   */
+  override def get_party_set(new_map: Map[String, String]): Seq[String] = {
+    Seq(getEntity(new_map.getOrEmptyStr("plaintiff_info"), "name").mkString(",")
+      , getEntity(new_map.getOrEmptyStr("defendant_info"), "name").mkString(","))
+  }
+
+
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}

+ 48 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/wenshu_detail_v2_yg_yishen.scala

@@ -0,0 +1,48 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+import com.winhc.bigdata.spark.utils.BaseUtil
+import org.json4s.DefaultFormats
+import org.json4s.jackson.Json
+
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Judgment documents - plaintiff - first instance
+ */
+case class wenshu_detail_v2_yg_yishen(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    "AND  new_data['is_success'] = '胜' AND new_data['case_stage'] = '一审'  AND new_data['case_type'] = '民事案件'"
+  }
+
+  override def get_label(new_data: Map[String, String]): String = {
+    BaseUtil.getTags(new_data, "case_amt", "judge_date", "5")
+  }
+
+  override def get_party_set(new_data: Map[String, String]): Seq[String] = {
+    Seq(
+      getEntity(new_data.getOrEmptyStr("plaintiff_info"), "name").mkString(","),
+      getEntity(new_data.getOrEmptyStr("defendant_info"), "name").mkString(",")
+    )
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("insert")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    getEntity(new_data.getOrEmptyStr("plaintiff_info"), "litigant_id")
+  }
+
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}

+ 53 - 0
src/main/scala/com/winhc/bigdata/spark/ng/chance/table/wenshu_detail_v2_yg_zhongben.scala

@@ -0,0 +1,53 @@
+package com.winhc.bigdata.spark.ng.chance.table
+
+import com.winhc.bigdata.spark.implicits.MapHelper._
+import com.winhc.bigdata.spark.ng.chance.{ChangeExtract, CompanyChanceHandle, CompanyChanceRecord, CompanyChanceUtils}
+import com.winhc.bigdata.spark.utils.BaseUtil
+import org.json4s.DefaultFormats
+import org.json4s.jackson.Json
+
+
+/**
+ * @Date: 2021/9/29
+ * @Description: Judgment documents - plaintiff - enforcement terminated
+ */
+case class wenshu_detail_v2_yg_zhongben(is_inc: Boolean) extends CompanyChanceHandle {
+
+  override def get_conditional_filter(): String = {
+    "AND  new_data['judge_result'] like '%终结本次执行程序%' AND new_data['case_stage'] = '一审'  AND new_data['case_type'] = '民事案件'"
+  }
+
+  override def get_label(new_data: Map[String, String]): String = {
+    BaseUtil.getTags(new_data, "case_amt", "judge_date", "6")
+//    val m: Map[String, String] = Map(
+//      "case_amt" -> BaseUtil.amt_div(new_data.getOrEmptyStr("case_amt"), "1",2).toString,
+//      "judge_date" -> new_data.getOrEmptyStr("judge_date").split(" ")(0)
+//    )
+//    Json(DefaultFormats).write(m)
+  }
+
+  override def get_party_set(new_data: Map[String, String]): Seq[String] = {
+    Seq(
+      getEntity(new_data.getOrEmptyStr("plaintiff_info"), "name").mkString(","),
+      getEntity(new_data.getOrEmptyStr("defendant_info"), "name").mkString(",")
+    )
+  }
+
+  override def filter: (String, String, Seq[String], Map[String, String], Map[String, String]) => Boolean = (update_type: String, biz_date: String, change_fields: Seq[String], old_data: Map[String, String], new_data: Map[String, String]) => {
+    if (CompanyChanceUtils.default_filter(update_type, biz_date, change_fields, old_data, new_data)) {
+      if (update_type.equals("insert")) true
+      else
+        false
+    } else {
+      false
+    }
+  }
+
+  override def getCompanyIds(new_data: Map[String, String]): Seq[String] = {
+    getEntity(new_data.getOrEmptyStr("plaintiff_info"), "litigant_id")
+  }
+
+  override def flat_map: ChangeExtract => Seq[CompanyChanceRecord] = (change_extract: ChangeExtract) => {
+    getCompanyChanceRecord(change_extract, getCompanyIds(change_extract.new_data))
+  }
+}
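
All three wenshu handlers build their label through BaseUtil.getTags(new_data, "case_amt", "judge_date", tag). The BaseUtil.scala diff is suppressed below, but the commented-out code above suggests it packages the normalized case amount and the judgment date, plus a tag code, into a JSON string. A rough sketch of that behaviour; the key names and tag handling are guesses, and the real method also normalizes the amount via BaseUtil.amt_div:

import org.json4s.DefaultFormats
import org.json4s.jackson.Json

object GetTagsSketch {
  // Sketch only, not the real BaseUtil.getTags.
  def getTags(new_data: Map[String, String], amtKey: String, dateKey: String, tag: String): String = {
    val m = Map(
      amtKey  -> new_data.getOrElse(amtKey, ""),
      dateKey -> new_data.getOrElse(dateKey, "").split(" ")(0), // keep the date part only
      "tag"   -> tag
    )
    Json(DefaultFormats).write(m)
  }

  def main(args: Array[String]): Unit = {
    // Prints something like {"case_amt":"12.34","judge_date":"2021-09-29","tag":"6"}
    println(getTags(Map("case_amt" -> "12.34", "judge_date" -> "2021-09-29 00:00:00"),
      "case_amt", "judge_date", "6"))
  }
}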

+ 1 - 0
src/main/scala/com/winhc/bigdata/spark/ng/change/NgChangeExtractArgs.scala

@@ -69,6 +69,7 @@ object NgChangeExtractArgs {
       ,NgChangeExtractArgs(tableName = "increase_registered_capital_info")    //capital increase records
       ,NgChangeExtractArgs(tableName = "zxr_evaluate")    //inquiry & valuation - selected appraisal agency
       ,NgChangeExtractArgs(tableName = "company_bid")    //bidding and tendering
+      ,NgChangeExtractArgs(tableName = "company_employment")    //recruitment
     )
 
 

+ 12 - 0
src/main/scala/com/winhc/bigdata/spark/ng/change/table/company_employment.scala

@@ -0,0 +1,12 @@
+
+
+package com.winhc.bigdata.spark.ng.change.table
+
+import com.winhc.bigdata.spark.ng.change.NgCompanyChangeHandle
+import com.winhc.bigdata.spark.utils.DateUtils
+
+case class company_employment(equCols: Seq[String], is_inc:Boolean) extends NgCompanyChangeHandle {
+
+
+  override protected def getBizDate(newMap: Map[String, String]): String = DateUtils.getBizDate(newMap("start_date"))
+}

+ 4 - 0
src/main/scala/com/winhc/bigdata/spark/udf/BaseFunc.scala

@@ -300,4 +300,8 @@ trait BaseFunc extends LoggingUtils {
       BKDRHash(case_nos.split(",").sorted.mkString(","))
     })
   }
+
+  def trans_number(): Unit = {
+    spark.udf.register("trans_number", (reg_capital: String) => BaseUtil.transNumber(reg_capital))
+  }
 }
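
trans_number registers a SQL UDF over BaseUtil.transNumber, whose implementation sits in the suppressed BaseUtil.scala diff (presumably it normalizes registered-capital strings into a number). A hedged usage sketch; the table and column names are illustrative, and the UDF is assumed to have been registered by a job mixing in BaseFunc:

import org.apache.spark.sql.SparkSession

object TransNumberUsageSketch {
  // Illustrative only: company_tmp / reg_capital are made-up names, and the exact
  // output format depends on BaseUtil.transNumber.
  def run(spark: SparkSession): Unit = {
    spark.sql("SELECT company_id, trans_number(reg_capital) AS reg_capital_num FROM company_tmp")
      .show(10, truncate = false)
  }
}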

File diff suppressed because it is too large
+ 38 - 9
src/main/scala/com/winhc/bigdata/spark/utils/BaseUtil.scala