许家凯, 4 years ago
Parent commit: f188f2bb81
1 changed file with 11 additions and 4 deletions

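For context, the change below splits what was one CREATE TABLE query into two steps: the deduplication (latest row per duplicate key via ROW_NUMBER) is materialized first, cached, and registered as the temp view `inc_tmp_view`; the per-`cid` count table is then built from that view. The following is a minimal, self-contained sketch of that pattern. The local SparkSession, the toy `company_src` data, and the simplified column list are illustrative assumptions only and are not part of the original job, which runs against the project's ODPS tables.

```scala
import org.apache.spark.sql.SparkSession

object DedupThenCountSketch {
  def main(args: Array[String]): Unit = {
    // Hypothetical local session for illustration; the real job builds its session elsewhere.
    val spark = SparkSession.builder().master("local[*]").appName("dedup-then-count").getOrCreate()
    import spark.implicits._

    // Toy input standing in for the source company table; "cid" and "update_time" mirror the diff.
    Seq(("c1", "2020-06-01"), ("c1", "2020-06-02"), ("c2", "2020-06-01"))
      .toDF("cid", "update_time")
      .createOrReplaceTempView("company_src")

    // Step 1 (as in the commit): keep only the latest row per key, cache the result,
    // and expose it as a temp view so later statements reuse it without recomputation.
    spark.sql(
      """
        |SELECT cid, update_time
        |FROM (
        |    SELECT tmp.*, ROW_NUMBER() OVER (PARTITION BY cid ORDER BY update_time DESC) c
        |    FROM company_src tmp
        |) tmp2
        |WHERE tmp2.c = 1
        |""".stripMargin).cache().createOrReplaceTempView("inc_tmp_view")

    // Step 2 (as in the commit): aggregate per-cid counts from the cached view.
    spark.sql(
      """
        |SELECT cid, COUNT(1) AS num
        |FROM inc_tmp_view
        |GROUP BY cid
        |""".stripMargin).show()

    spark.stop()
  }
}
```

Caching the deduplicated result means the count statement (and any further summary queries against `inc_tmp_view`) reads it from memory instead of re-running the window function over the source table.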
src/main/scala/com/winhc/bigdata/spark/utils/CompanyIncSummary.scala (+11 -4)

@@ -50,11 +50,10 @@ case class CompanyIncSummary(s: SparkSession,
       println("cols not equals!")
       sys.exit(-99)
     }
+
     sql(
       s"""
-         |CREATE TABLE IF NOT EXISTS ${project}.xjk_tmp_count_$tableName as
-         |SELECT  ${cidField} as cid
-         |        ,COUNT(1) as num
+         |SELECT  ${new_cols.map(getCastCols(_, "")).mkString(",")}
          |FROM    (
          |            SELECT  tmp.*
          |                    ,ROW_NUMBER() OVER(PARTITION BY ${dupliCols.mkString(",")} ORDER BY update_time DESC ) c
@@ -78,9 +77,17 @@ case class CompanyIncSummary(s: SparkSession,
          |                    ) AS tmp
          |        ) tmp2
          |WHERE   tmp2.c = 1
+         |""".stripMargin).cache().createOrReplaceTempView("inc_tmp_view")
+
+    sql(
+      s"""
+         |CREATE TABLE IF NOT EXISTS winhc_eci_dev.xjk_tmp_count_company_icp AS
+         |SELECT  ${cidField} as cid
+         |        ,COUNT(1) as num
+         |FROM    inc_tmp_view
          |GROUP BY $cidField
          |""".stripMargin)
-//      .write.mode("overwrite").saveAsTable(s"${project}.xjk_tmp_count_$tableName")
+
     /* .rdd.map(row => {
      val id = row(0).asInstanceOf[String]
      val num = row(1).asInstanceOf[String]