@@ -34,8 +34,6 @@ object CompanyIntellectualsScore {
       "spark.hadoop.odps.project.name" -> "winhc_eci_dev",
       "spark.hadoop.odps.spark.local.partition.amt" -> "10"
     )
-    config.+=("spark.hadoop.odps.project.name" -> "winhc_eci_dev")
-    println(config)
     val spark: SparkSession = SparkUtils.InitEnv(this.getClass.getSimpleName, config)
 
     println(s"company ${this.getClass.getSimpleName} calc start! " + new Date().toString)
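
SparkUtils.InitEnv is project-internal, so as a reference only: a minimal, stand-alone sketch of how a config map like the one above can be applied through the stock SparkSession builder. The two keys are taken from the hunk; the object name, appName, and local master are illustrative, not part of this change.

import org.apache.spark.sql.SparkSession
import scala.collection.mutable

object InitEnvSketch {
  def main(args: Array[String]): Unit = {
    // keys copied from the hunk above; any other settings would be added the same way
    val config = mutable.Map(
      "spark.hadoop.odps.project.name" -> "winhc_eci_dev",
      "spark.hadoop.odps.spark.local.partition.amt" -> "10"
    )

    val builder = SparkSession.builder()
      .appName("InitEnvSketch")
      .master("local[*]") // local sketch only; a cluster job would not hard-code this

    // Builder.config(key, value) mutates the builder, so a simple foreach is enough
    config.foreach { case (k, v) => builder.config(k, v) }

    val spark = builder.getOrCreate()
    println(spark.conf.get("spark.hadoop.odps.project.name"))
    spark.stop()
  }
}
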
@@ -57,14 +55,14 @@ object CompanyIntellectualsScore {
 
   def valid(args: Array[String]) = {
     if (args.length != 2) {
-      println("请输入要计算的 工作空间,table !!!! ")
+      println("please input namespace, table!!!")
       sys.exit(-1)
     }
     val Array(namespace, sourceTable) = args
 
     val (flag, time, kind, project) = tabMapping.getOrElse(sourceTable, ("", "", "", ""))
     if (flag.isEmpty || time.isEmpty || kind.isEmpty || project.isEmpty) {
-      println("输入表不存在!!! ")
+      println("table not found!!!")
       sys.exit(-1)
     }
 
@@ -75,7 +73,7 @@ object CompanyIntellectualsScore {
   def start(spark: SparkSession, namespace: String, sourceTable: String, tableView: String): Unit = {
     val (flag, time, kind, project) = tabMapping.getOrElse(sourceTable, ("", "", "", ""))
     if (flag.isEmpty || time.isEmpty || kind.isEmpty || project.isEmpty) {
-      println("输入模型计算表不存在!!! ")
+      println("model table not found!!!")
       sys.exit(-1)
     }
 
@@ -100,6 +98,7 @@ case class CompanyIntellectualsScore(s: SparkSession, sourceTable: String, table
   import spark.implicits._
 
   def calc(): Unit = {
+    println(s"${this.getClass.getSimpleName} calc start! " + new Date().toString)
     //val targetTable = "ads_company_total_score"
     val adsTable = namespace + "ads_" + sourceTable
     val incAdsTable = namespace + "inc_ads_" + sourceTable
@@ -150,12 +149,12 @@ case class CompanyIntellectualsScore(s: SparkSession, sourceTable: String, table
         "score", "total", "extraScore")
       .createOrReplaceTempView(s"t1_view")
 
-    logger.info(
-      s"""
-         |- - - - - - - - - - - - - - - - - - - - - - - - -
-         |${showString(sql(s"select * from t1_view"))}
-         |- - - - - - - - - - - - - - - - - - - - - - - - -
-       """.stripMargin)
+    // logger.info(
+    //   s"""
+    //      |- - - - - - - - - - - - - - - - - - - - - - - - -
+    //      |${showString(sql(s"select * from t1_view"))}
+    //      |- - - - - - - - - - - - - - - - - - - - - - - - -
+    //    """.stripMargin)
 
     sql(s"insert overwrite table ${targetTable}${apptab} " +
       s"partition (ds='${ds}') select * from t1_view")
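
For trying the write pattern in this hunk outside ODPS: a minimal local sketch of registering a temp view and overwriting a single static partition. The table name tmp_company_score, its columns, and the ds value are illustrative and assume a recent plain Spark build, not the job's real ${targetTable}${apptab} tables.

import org.apache.spark.sql.SparkSession

object PartitionOverwriteSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("PartitionOverwriteSketch")
      .getOrCreate()
    import spark.implicits._

    // illustrative partitioned target table (datasource table, no Hive metastore needed)
    spark.sql(
      """create table if not exists tmp_company_score (cid string, score double, ds string)
        |using parquet partitioned by (ds)""".stripMargin)

    Seq(("c1", 80.0), ("c2", 95.5)).toDF("cid", "score").createOrReplaceTempView("t1_view")

    val ds = "20200707"
    // static partition spec: only the partition named here is overwritten
    spark.sql(s"insert overwrite table tmp_company_score partition (ds='$ds') select * from t1_view")
    spark.sql("select * from tmp_company_score").show(false)

    spark.stop()
  }
}
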
@@ -163,13 +162,16 @@ case class CompanyIntellectualsScore(s: SparkSession, sourceTable: String, table
     val dataFrame = sql(
       s"""
          |select
-         |CONCAT_WS('_',cid,id) AS rowkey,
+         |CONCAT_WS('_',cid,project_code) AS rowkey,
          |id,cid,kind,kind_code,project,project_code,type,score,total,extraScore
          |from t1_view
          |""".stripMargin)
 
     // sync to hbase
-    Maxcomputer2Hbase(dataFrame,"COMPANY_SCORE").syn()
+    if ("1".equals(tp)) { // stock (full) calculation does not need the hbase sync
+      Maxcomputer2Hbase(dataFrame, "COMPANY_SCORE").syn()
+    }
+    println(s"${this.getClass.getSimpleName} calc end! " + new Date().toString)
   }
 
   def trans(r: Row, flag: String, kind: String, prpject: String) = {
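
And a self-contained sketch of the rowkey change above (CONCAT_WS('_', cid, project_code) instead of cid and id). The DataFrame and its values are made up; the comment on overwrite behaviour is an inference about the intent of the change, not something stated in the code.

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.concat_ws

object RowkeySketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("RowkeySketch")
      .getOrCreate()
    import spark.implicits._

    val df = Seq(("c1", "p100", 90), ("c2", "p200", 75))
      .toDF("cid", "project_code", "score")

    // cid + project_code is presumably stable across recomputations, so a re-run
    // overwrites the same HBase row instead of adding one per surrogate id
    df.withColumn("rowkey", concat_ws("_", $"cid", $"project_code")).show(false)

    spark.stop()
  }
}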