Ver Fonte

feat: 动态

许家凯 há 3 anos atrás
pai
commit
95dea1d53a

+ 2 - 3
src/main/scala/com/winhc/bigdata/spark/ng/dynamic/CompanyDynamicRecord.scala

@@ -2,6 +2,7 @@ package com.winhc.bigdata.spark.ng.dynamic
 
 import com.winhc.bigdata.spark.implicits.CaseClass2JsonHelper._
 import com.winhc.bigdata.spark.ng.dynamic.NgCompanyRiskLevelType.NgCompanyRiskLevelType
+import com.winhc.bigdata.spark.ng.dynamic.utils.CollapseKeyArgs
 import org.apache.commons.lang3.StringUtils
 import org.apache.spark.sql.Row
 
@@ -93,9 +94,7 @@ case class CompanyDynamicRecord(id: String,
       , dynamic_info.toJson()
       , agg_detail_text
       , agg_detail_rowkey_str
-      , null
-      , null
-      , s"${tn}_${change_time}"
+      , s"${CollapseKeyArgs.transform(tn)}_${change_time}"
       , change_time
       , update_time
       , create_time

+ 2 - 2
src/main/scala/com/winhc/bigdata/spark/ng/dynamic/NgCompanyDynamic.scala

@@ -43,8 +43,8 @@ case class NgCompanyDynamic(s: SparkSession,
          |    ,dynamic_info STRING COMMENT '动态展示层的相关数据,json格式'
          |    ,agg_detail_text STRING COMMENT '聚合类型的json数据,规范优先'
          |    ,agg_detail_rowkey STRING COMMENT '聚合类型rowkey的多项rowkey字段,结构:tn@@rowkey,tn@@rowkey'
-         |    ,old_record STRING COMMENT '上一个版本数据json格式'
-         |    ,new_record STRING COMMENT '当前版本数据json格式'
+         |---    ,old_record STRING COMMENT '上一个版本数据json格式'
+         |---    ,new_record STRING COMMENT '当前版本数据json格式'
          |    ,collapse_key STRING COMMENT '用于折叠的字段,concat_ws("_",tn,change_time)'
          |    ,change_time string COMMENT '变更时间(业务展示 yyyy-MM-dd)'
          |    ,update_time STRING  COMMENT  '更新时间'

+ 18 - 0
src/main/scala/com/winhc/bigdata/spark/ng/dynamic/utils/CollapseKeyArgs.scala

@@ -0,0 +1,18 @@
+package com.winhc.bigdata.spark.ng.dynamic.utils
+
+/**
+ * Standalone handling of the collapse key field (used to fold dynamic
+ * records of related table names into one canonical collapse key).
+ *
+ * @author: XuJiakai
+ * @date: 2021/7/9 14:16
+ */
+object CollapseKeyArgs {
+
+  // Canonical table name -> the table-name aliases that should collapse into it.
+  val map = Map(
+    "company_punishment_info" -> Seq("company_punishment_info", "company_punishment_info_creditchina")
+  )
+
+  // Inverted lookup (alias -> canonical tn); identity pairs (alias == canonical)
+  // are filtered out so only genuine aliases remain.
+  private val m = map.flatMap(r => r._2.map((_ -> r._1))).filter(r => !r._2.equals(r._1))
+
+  // Returns the canonical tn for an alias, or tn unchanged when no alias mapping exists.
+  def transform(tn: String): String = m.getOrElse(tn, tn)
+}

+ 5 - 1
src/main/scala/com/winhc/bigdata/spark/ng/dynamic/utils/DailyAggHandle.scala

@@ -31,7 +31,11 @@ abstract class DailyAggHandle() extends NgCompanyDynamicHandle {
     val update_time = change_extract.update_time
     var list: mutable.Seq[CompanyDynamicRecord] = mutable.Seq.empty
     val dy = getDynamicInfo(new_data)
-    list = list :+ getCompanyDynamicRecord(change_extract, dy._1, dy._2, null, getAssociationEntityInfo(new_data))
+    if (dy == null) {
+      list = list :+ getCompanyDynamicRecord(change_extract, null, null, null, getAssociationEntityInfo(new_data))
+    } else {
+      list = list :+ getCompanyDynamicRecord(change_extract, dy._1, dy._2, null, getAssociationEntityInfo(new_data))
+    }
     list
   }
 

Diff do ficheiro suprimidas por serem muito extensas
+ 88 - 10
src/main/scala/com/winhc/bigdata/spark/ng/dynamic/utils/DynamicFiledUtils.scala


+ 1 - 0
src/main/scala/com/winhc/bigdata/spark/ng/utils/Enum.scala

@@ -6,5 +6,6 @@ package com.winhc.bigdata.spark.ng.utils
  */
 trait Enum[_V] {
   type Value <: _V
+
   // Implicitly narrows a raw _V to the refined Value member type via an unchecked
   // cast; callers rely on every _V they pass actually being a declared Value.
   protected implicit def v2v(v: _V): Value = v.asInstanceOf[Value]
 }