@@ -25,7 +25,8 @@ case class CompanyIncSummary(s: SparkSession,
   private val f_bytes: Array[Byte] = Bytes.toBytes("F")
   private val name_bytes: Array[Byte] = Bytes.toBytes(tableName.toUpperCase)
   val updateTimeMapping = Map(
-    "wenshu_detail_combine" -> "update_date" // wenshu (court document) sort time
+    "wenshu_detail_combine" -> "update_date", // wenshu (court document) sort time
+    "company_equity_info_list" -> "ds" // wenshu (court document) sort time
   )

   def calc(): Unit = {
@@ -37,14 +38,16 @@ case class CompanyIncSummary(s: SparkSession,
       .last


-    val ads_table_cols = spark.table(ads_table).columns.filter(l => {
+    val ads_table_cols = spark.table(ads_table).columns
+    /* .filter(l => {
       !l.equals("ds") && !l.equals("rowkey") && !l.equals("flag") && !l.equals("new_cids") && !l.equals("cids") && !l.equals("cid") && !l.equals("new_litigant_cids")
-    }).toList.sorted
+    }).toList.sorted*/


-    val inc_ads_table_cols = spark.table(inc_ads_table).columns.filter(l => {
+    val inc_ads_table_cols = spark.table(inc_ads_table).columns
+    /* .filter(l => {
       !l.equals("ds") && !l.equals("rowkey") && !l.equals("flag") && !l.equals("new_cids") && !l.equals("cids") && !l.equals("cid") && !l.equals("new_litigant_cids")
-    }).toList.sorted
+    }).toList.sorted*/


     val new_cols = ads_table_cols.intersect(inc_ads_table_cols)
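
Taken together, the second hunk drops the column filtering: with the .filter(...) chain commented out, ads_table_cols and inc_ads_table_cols now hold every column of their tables (including ds, rowkey, flag, cid, cids, new_cids and new_litigant_cids), and new_cols is simply the intersection of the two full column arrays. The sketch below illustrates that step in isolation; it is a minimal standalone example, and the table names and SparkSession setup are assumptions for illustration, not taken from the patch.

import org.apache.spark.sql.SparkSession

object ColumnIntersectSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("column-intersect-sketch")
      .enableHiveSupport()
      .getOrCreate()

    // Hypothetical table names standing in for ads_table / inc_ads_table in CompanyIncSummary.
    val adsTable = "ads.ads_company_demo"
    val incAdsTable = "ads.inc_ads_company_demo"

    // With the filter commented out, every column is kept,
    // including ds, rowkey, flag, cid/cids and the *_cids variants.
    val adsCols: Array[String] = spark.table(adsTable).columns
    val incAdsCols: Array[String] = spark.table(incAdsTable).columns

    // Columns present in both tables, kept in adsCols order (Array.intersect preserves left order).
    val newCols: Array[String] = adsCols.intersect(incAdsCols)

    println(newCols.mkString(","))
    spark.stop()
  }
}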