@@ -5,7 +5,7 @@ import java.util.Date
 import com.winhc.bigdata.spark.config.EsConfig
 import com.winhc.bigdata.spark.utils.BaseUtil.isWindows
 import com.winhc.bigdata.spark.utils.ReflectUtils.getClazz
-import com.winhc.bigdata.spark.utils.{LoggingUtils, SparkUtils}
+import com.winhc.bigdata.spark.utils.{DateUtils, LoggingUtils, SparkUtils}
 import org.apache.commons.lang3.time.DateFormatUtils
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.types.StringType
@@ -30,7 +30,7 @@ object CompanyDynamic {
   @(transient@getter) val spark: SparkSession = s

   private val env = "dev"
-  val targetTab = "xjk_tmp_company_dynamic"
+  val targetTab = "ads_company_dynamic"

   def init(): Unit = {
     sql(
@@ -143,12 +143,13 @@ object CompanyDynamic {
           None
         }
         else {
-          result.map(res => Row(res._1, res._2, res._3, res._4, res._5, res._6, res._7, res._8, res._9, res._10, DateFormatUtils.format(new Date(), "yyyy-MM-dd HH:mm:ss")))
+          result.map(res => Row(s"${res._1}-${res._8}-${DateUtils.toUnixTimestamp(date = res._6)}", res._1, res._2, res._3, res._4, res._5, res._6, res._7, res._8, res._9, res._10, DateFormatUtils.format(new Date(), "yyyy-MM-dd HH:mm:ss")))
         }
       })

     val schema = getSchema(ListMap(
-      "cid" -> StringType
+      "id" -> StringType
+      , "cid" -> StringType
       , "cname" -> StringType
       , "info_type" -> StringType
       , "rta_desc" -> StringType