
fix: deleted defaults to 0

许家凯 · 3 years ago
commit cd1a986e5e

+ 1 - 1
src/main/scala/com/winhc/bigdata/spark/ng/jobs/CompanyIndexJob.scala

@@ -249,7 +249,7 @@ case class CompanyIndexJob(s: SparkSession,
          |        ,logo
          |        ,reg_number
          |        ,company_score_weight
-         |        ,deleted
+         |        ,COALESCE(deleted,'0') AS deleted
          |from    $all_tab
          |""".stripMargin)
 

+ 18 - 2
src/main/scala/com/winhc/bigdata/spark/ng/jobs/general_handler.scala

@@ -64,7 +64,15 @@ case class general_handler(s: SparkSession,
     sql(
       s"""
          |INSERT ${if (isWindows) "INTO" else "OVERWRITE"} TABLE $ads_tab PARTITION(ds='$inc_ods_ds')
-         |SELECT  ${getColumns(ads_tab).diff(Seq("ds")).mkString(",")}
+         |SELECT  ${
+        getColumns(ads_tab).diff(Seq("ds"))
+          .map(f => {
+            if (f.equalsIgnoreCase("deleted"))
+              "COALESCE(deleted,0) as deleted"
+            else
+              f
+          }).mkString(",")
+      }
          |FROM    (
          |            SELECT  *
          |                    ,ROW_NUMBER() OVER(PARTITION BY rowkey ORDER BY ds${up} DESC) AS num
@@ -122,7 +130,15 @@ case class general_handler(s: SparkSession,
     sql(
       s"""
          |INSERT ${if (isWindows) "INTO" else "OVERWRITE"} TABLE $inc_ads_tab PARTITION(ds='$target_ds')
-         |SELECT  ${getColumns(inc_ads_tab).diff(Seq("ds")).mkString(",")}
+         |SELECT  ${
+        getColumns(inc_ads_tab).diff(Seq("ds"))
+          .map(f => {
+            if (f.equalsIgnoreCase("deleted"))
+              "COALESCE(deleted,0) as deleted"
+            else
+              f
+          }).mkString(",")
+      }
          |FROM    (
          |            SELECT  *
          |                    ,ROW_NUMBER() OVER(PARTITION BY rowkey ORDER BY ds${up} DESC) AS num
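
Both statements in this file (and the two below in inc_company_ng.scala) now inline the same map over the column list. If that pattern spreads further, it could be lifted into a small shared helper; the sketch below is hypothetical and not part of the commit (object and method names are invented):

// Hypothetical helper, not in the repo: builds a SELECT list and defaults the
// nullable deleted flag, mirroring the inline rewrite added by this commit.
object SelectCols {
  def withDeletedDefault(cols: Seq[String], default: String = "0"): String =
    cols.map {
      case f if f.equalsIgnoreCase("deleted") => s"COALESCE(deleted,$default) AS deleted"
      case f                                  => f
    }.mkString(",")
}

Call sites that want the string literal, as in CompanyIndexJob.scala above, would pass "'0'" as the default.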

+ 18 - 2
src/main/scala/com/winhc/bigdata/spark/ng/jobs/inc_company_ng.scala

@@ -40,7 +40,16 @@ case class inc_company_ng(s: SparkSession,
     sql(
       s"""
          |INSERT ${if (isWindows) "INTO" else "OVERWRITE"} TABLE $ads_tab PARTITION(ds='$inc_ods_ds')
-         |SELECT  ${getColumns(ads_tab).diff(Seq("ds")).mkString(",")}
+         |SELECT  ${
+        getColumns(ads_tab).diff(Seq("ds"))
+          .map(f => {
+            if (f.equalsIgnoreCase("deleted"))
+              "COALESCE(deleted,0) as deleted"
+            else
+              f
+          })
+          .mkString(",")
+      }
          |FROM    (
          |            SELECT  *
          |                    ,ROW_NUMBER() OVER(PARTITION BY company_id ORDER BY ds${up} DESC) AS num
@@ -95,7 +104,14 @@ case class inc_company_ng(s: SparkSession,
     sql(
       s"""
          |INSERT ${if (isWindows) "INTO" else "OVERWRITE"} TABLE $inc_ads_tab PARTITION(ds='$target_ds')
-         |SELECT  ${cols.mkString(",")}
+         |SELECT  ${
+        cols.map(f => {
+          if (f.equalsIgnoreCase("deleted"))
+            "COALESCE(deleted,0) as deleted"
+          else
+            f
+        }).mkString(",")
+      }
          |FROM    (
          |            SELECT  *
          |                    ,ROW_NUMBER() OVER(PARTITION BY company_id ORDER BY ds${up} DESC) AS num
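
The same rewrite appears here twice more, once over getColumns(ads_tab).diff(Seq("ds")) and once over the precomputed cols. A quick check of the hypothetical helper sketched under general_handler.scala, run against a column list shaped like this job's (column names assumed):

// Assumes the hypothetical SelectCols object sketched above; not part of the commit.
object SelectColsCheck {
  def main(args: Array[String]): Unit = {
    val cols = Seq("company_id", "name", "deleted")
    // Prints: company_id,name,COALESCE(deleted,0) AS deleted
    println(SelectCols.withDeletedDefault(cols))
  }
}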

+ 3 - 3
src/main/scala/com/winhc/bigdata/spark/utils/DateUtils.scala

@@ -44,7 +44,7 @@ object DateUtils {
       p = "yyyy-MM-dd"
     }
     val fm = new SimpleDateFormat(p)
-    fm.parse(date).getTime + 28800000L + ""
+    fm.parse(date).getTime +  ""
   }
 
   def toUnixTimestamp(date: String, pattern: String = "yyyy-MM-dd HH:mm:ss"): Long = {
@@ -163,8 +163,8 @@ object DateUtils {
   }
 
   def main(args: Array[String]): Unit = {
-    //    println(DateUtils.toMillisTimestamp(date = "2020-09-17 18:02:02"))
-    println(getNotNullStr(null, null))
+        println(DateUtils.toMillisTimestamp(date = "2001-06-05 00:00:00"))
+//    println(getNotNullStr(null, null))
     //    println(getNotNullStr(null, "2003-10-12 10:00:00", null, "2003-11-12 00:00:02"))
   }