@@ -2,11 +2,11 @@ package com.winhc.bigdata.spark.jobs.increment
 
 import java.util.Date
 
-import com.winhc.bigdata.spark.utils.{BaseUtil, LoggingUtils, PhoenixUtil}
+import com.winhc.bigdata.spark.config.PhoenixConfig
+import com.winhc.bigdata.spark.utils.{BaseUtil, LoggingUtils}
 import org.apache.spark.sql.SparkSession
 
 import scala.annotation.meta.getter
-
 import com.winhc.bigdata.spark.implicits.PhoenixHelper._
 
 /**
@@ -74,8 +74,8 @@ case class CommonTableOps(s: SparkSession, sourceTable: String, dupCols: Seq[Str
     println(s"${this.getClass.getSimpleName} ads end! " + new Date().toString)
 
     // Write to the PHX table
-    val DB_PHOENIX_URL = PhoenixUtil.getPhoenixJDBCUrl
-    val connProp = PhoenixUtil.getPhoenixProperties
+    val DB_PHOENIX_URL = PhoenixConfig.getPhoenixJDBCUrl
+    val connProp = PhoenixConfig.getPhoenixProperties
     df1.persist()
     sql(s"""SELECT ${adsColumns.filter(!_.equals("ROWKEY")).mkString(",")},CONCAT_WS("_",c.cid,c.id) AS ROWKEY FROM t2""")
       .save2PhoenixByJDBC(s"${phxTable}")
@@ -145,8 +145,8 @@ case class CommonTableOps(s: SparkSession, sourceTable: String, dupCols: Seq[Str
     println(s"${this.getClass.getSimpleName} ads end! " + new Date().toString)
 
     // Write to the PHX LIST table
-    val DB_PHOENIX_URL = PhoenixUtil.getPhoenixJDBCUrl
-    val connProp = PhoenixUtil.getPhoenixProperties
+    val DB_PHOENIX_URL = PhoenixConfig.getPhoenixJDBCUrl
+    val connProp = PhoenixConfig.getPhoenixProperties
     df1.persist()
     sql(s"SELECT ${adsListColumns.mkString(",")} FROM t2")
       .save2PhoenixByJDBC(s"${phxListTable}")
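
Both hunks end with save2PhoenixByJDBC, which comes from the com.winhc.bigdata.spark.implicits.PhoenixHelper._ import at the top of the file. Its implementation is not shown in this diff; below is a minimal sketch of how such an implicit could be written, assuming it batches Phoenix UPSERTs over JDBC (Phoenix does not accept the INSERT statements that Spark's stock JDBC writer emits). Note that the DB_PHOENIX_URL and connProp vals assigned in the job are not passed to save2PhoenixByJDBC, so the sketch has the helper read PhoenixConfig directly. The class and method names outside this diff are assumptions.

    // Hypothetical sketch of the PhoenixHelper implicit used above; the real
    // implementation in com.winhc.bigdata.spark.implicits is not in this diff.
    package com.winhc.bigdata.spark.implicits

    import java.sql.DriverManager

    import com.winhc.bigdata.spark.config.PhoenixConfig
    import org.apache.spark.sql.{DataFrame, Row}

    object PhoenixHelper {

      implicit class DataFrameEnhancer(df: DataFrame) {
        // Append every row of the DataFrame into the given Phoenix table,
        // one batched UPSERT per partition.
        def save2PhoenixByJDBC(tableName: String): Unit = {
          val cols = df.columns
          val upsert =
            s"UPSERT INTO $tableName (${cols.mkString(",")}) " +
              s"VALUES (${cols.map(_ => "?").mkString(",")})"
          df.foreachPartition { rows: Iterator[Row] =>
            val conn = DriverManager.getConnection(
              PhoenixConfig.getPhoenixJDBCUrl, PhoenixConfig.getPhoenixProperties)
            try {
              conn.setAutoCommit(false)
              val stmt = conn.prepareStatement(upsert)
              rows.foreach { row =>
                cols.indices.foreach(i => stmt.setObject(i + 1, row.get(i)))
                stmt.addBatch()
              }
              stmt.executeBatch()
              conn.commit()
            } finally conn.close()
          }
        }
      }
    }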