|
@@ -1,42 +1,68 @@
|
|
|
package com.winhc.bigdata.spark.jobs
|
|
|
|
|
|
import java.util.Date
|
|
|
-import com.winhc.bigdata.spark.utils.{BaseUtil, SparkUtils}
|
|
|
+
|
|
|
+import com.winhc.bigdata.spark.utils.{BaseUtil, LoggingUtils, SparkUtils}
|
|
|
import org.apache.spark.sql.{Row, SparkSession}
|
|
|
import com.winhc.bigdata.calc.DimScoreV2
|
|
|
+import com.winhc.bigdata.spark.jobs.CompanyCourtAnnouncement.tabMapping
|
|
|
+
|
|
|
+import scala.annotation.meta.getter
|
|
|
import scala.collection.mutable
|
|
|
|
|
|
/**
 * Score-calculation job for court announcements, court-session announcements,
 * and case-filing records (法院公告 / 开庭公告 / 立案信息).
 */
object CompanyCourtAnnouncement {

  /**
   * Maps each supported source table to its scoring metadata:
   * (flag, date column used for ordering, risk category, dimension name).
   */
  val tabMapping: Map[String, (String, String, String, String)] =
    Map(
      "ads_company_court_announcement_list" -> ("1", "publish_date", "法律风险", "法院公告"), // court announcement
      "ads_company_court_open_announcement_list" -> ("2", "start_date", "法律风险", "开庭公告"), // court-session announcement
      "ads_company_court_register_list" -> ("3", "filing_date", "法律风险", "立案信息") // case filing
    )

  /**
   * Entry point: validates the CLI arguments, builds a SparkSession and runs
   * the score calculation for the requested table.
   */
  def main(args: Array[String]): Unit = {
    val (sourceTable, flag, time, kind, project) = valid(args)

    // Never reassigned, so a val suffices (the Map itself is the mutable part).
    val config = mutable.Map.empty[String, String]

    val spark: SparkSession = SparkUtils.InitEnv(this.getClass.getSimpleName, config)

    new CompanyCourtAnnouncement(spark, sourceTable, flag, time, kind, project).calc()
    spark.stop()
  }

  /**
   * Validates the CLI arguments and resolves the scoring metadata for the
   * requested table. Terminates the JVM with exit code -1 on invalid input
   * (wrong argument count, or a table not present in [[tabMapping]]).
   *
   * @param args expects exactly one element: the source table name
   * @return (sourceTable, flag, dateColumn, riskCategory, dimensionName)
   */
  def valid(args: Array[String]): (String, String, String, String, String) = {
    if (args.length != 1) {
      println("请输入要计算的table!!!! ")
      sys.exit(-1)
    }
    val sourceTable = args(0)

    // Empty strings act as the "not found" sentinel for an unknown table.
    val (flag, time, kind, project) = tabMapping.getOrElse(sourceTable, ("", "", "", ""))
    if (flag.isEmpty || time.isEmpty || kind.isEmpty || project.isEmpty) {
      println("输入表不存在!!! ")
      sys.exit(-1)
    }
    (sourceTable, flag, time, kind, project)
  }
}
|
|
|
+
|
|
|
+case class CompanyCourtAnnouncement(s: SparkSession, sourceTable: String,
|
|
|
+ flag: String, time: String, kind: String, project: String
|
|
|
+ ) extends LoggingUtils {
|
|
|
+
|
|
|
+ @(transient@getter) val spark: SparkSession = s
|
|
|
+
|
|
|
+ import spark.implicits._
|
|
|
+ import spark._
|
|
|
+ import org.apache.spark.sql.functions._
|
|
|
+
|
|
|
+ def calc(): Unit = {
|
|
|
+ println(s"company ${this.getClass.getSimpleName} calc start! " + new Date().toString)
|
|
|
|
|
|
val df = sql(
|
|
|
s"""
|
|
@@ -64,9 +90,9 @@ object CompanyCourtAnnouncement {
|
|
|
sql(s"insert overwrite table ${sourceTable}_score select * from ${sourceTable}_tmp_view")
|
|
|
|
|
|
println(s"company ${this.getClass.getSimpleName} calc end! " + new Date().toString)
|
|
|
- spark.stop()
|
|
|
}
|
|
|
|
|
|
+
|
|
|
def trans(r: Row, flag: String, kind: String, prpject: String) = {
|
|
|
val id = r.getAs[Long]("id")
|
|
|
val cid = r.getAs[Long]("new_cid").toString
|