@@ -1,6 +1,7 @@
 package com.winhc.bigdata.spark.utils
 
 import com.winhc.bigdata.spark.config.HBaseConfig
+import com.winhc.bigdata.spark.udf.BaseFunc
 import org.apache.hadoop.hbase.client.Put
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable
 import org.apache.hadoop.hbase.util.Bytes
@@ -20,7 +21,7 @@ case class CompanyIncSummary(s: SparkSession,
                              tableName: String, // main table name (without prefix)
                              cidField: String, // company id fieldName
                              dupliCols: Seq[String] // dedup primary key
-                            ) extends LoggingUtils {
+                            ) extends LoggingUtils with BaseFunc {
   @(transient@getter) val spark: SparkSession = s
   private val f_bytes: Array[Byte] = Bytes.toBytes("F")
   private val name_bytes: Array[Byte] = Bytes.toBytes(tableName.toUpperCase)
@@ -30,6 +31,7 @@ case class CompanyIncSummary(s: SparkSession,
   )
 
   def calc(): Unit = {
+    cleanup()
     val ads_table = s"${project}.ads_$tableName" // existing (full) ads table
     val inc_ads_table = s"${project}.inc_ads_$tableName"
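For context: this change mixes BaseFunc into CompanyIncSummary so that calc() can call cleanup() before rebuilding the summary. The trait itself (com.winhc.bigdata.spark.udf.BaseFunc) is not part of this diff, so the sketch below is only a hypothetical illustration, assuming cleanup() drops temporary views left over from an earlier run; the method body and the Catalog calls are assumptions, not the repository's actual implementation.

import org.apache.spark.sql.SparkSession

// Hypothetical sketch only; the real BaseFunc may differ.
trait BaseFunc {
  // The mixing class supplies the active session
  // (CompanyIncSummary exposes `val spark: SparkSession = s`).
  def spark: SparkSession

  // Assumed behaviour: drop leftover temporary views so that calc()
  // starts from a clean session state.
  def cleanup(): Unit = {
    spark.catalog
      .listTables()
      .collect()
      .filter(_.isTemporary)
      .foreach(t => spark.catalog.dropTempView(t.name))
  }
}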