
Court announcements (法院公告)

xufei 4 years ago
parent
commit
0c9fb3e18d

+ 6 - 0
src/main/java/com/winhc/bigdata/calc/DimScoreV2.java

@@ -287,6 +287,12 @@ public class DimScoreV2 {
         put("4","经营风险");
         put("5","法律风险");
 
+        put("基本情况","1");
+        put("经营情况","2");
+        put("资产权益","3");
+        put("经营风险","4");
+        put("法律风险","5");
+
         // sub-category codes
         // Basic information (基本情况)
         put("注册资本","101");
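
The six added lines register the reverse name → code direction beside the existing code → name entries, which is what lets the new Scala job below resolve a category name such as "法律风险" to its code via DimScoreV2.newsEventMap.get(kind). A minimal Scala sketch of the lookup, assuming newsEventMap behaves as a flat String-to-String map (the map values come from the diff; everything else is illustrative):

    // Sketch only: newsEventMap modeled as a flat String -> String lookup.
    val newsEventMap: Map[String, String] = Map(
      "4" -> "经营风险", "5" -> "法律风险", // code -> name (already present)
      "经营风险" -> "4", "法律风险" -> "5"  // name -> code (added in this hunk)
    )
    assert(newsEventMap("法律风险") == "5") // the lookup trans() performs below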

+ 83 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/CompanyCourtAnnouncement.scala

@@ -0,0 +1,83 @@
+package com.winhc.bigdata.spark.jobs
+
+import java.util.Date
+
+import com.winhc.bigdata.spark.utils.SparkUtils
+import org.apache.spark.sql.{Row, SparkSession}
+import com.winhc.bigdata.calc.DimScoreV2
+import org.apache.spark.internal.Logging
+import com.winhc.bigdata.spark.utils.BaseUtil
+import scala.collection.mutable
+
+/**
+ * Court announcements (法院公告): scores companies on their
+ * court-announcement activity over the past three months.
+ */
+object CompanyCourtAnnouncement extends Logging {
+  def main(args: Array[String]): Unit = {
+
+    val config = mutable.Map.empty[String, String]
+    val spark: SparkSession = SparkUtils.InitEnv(this.getClass.getSimpleName, config)
+    import spark.implicits._
+    import spark._
+    import org.apache.spark.sql.functions._
+    println(s"company ${this.getClass.getSimpleName} calc start! " + new Date().toString)
+
+    val company_court_announcement = "ads_company_court_announcement_list"
+    val company_court_announcement_score = "company_court_announcement_score"
+
+    // One row per company (new_cid): the most recent announcement in the past
+    // 3 months, with window counts of plaintiff-side (cnt1) and defendant-side (cnt2) roles.
+    val df = sql(
+      s"""
+         |SELECT  *
+         |FROM    (
+         |            SELECT  *
+         |                    ,sum(CASE  WHEN party_role = 'y' THEN 1 ELSE 0 END) OVER(PARTITION BY new_cid) AS cnt1
+         |                    ,sum(CASE  WHEN party_role = 'n' THEN 1 ELSE 0 END) OVER(PARTITION BY new_cid) AS cnt2
+         |                    ,row_number() OVER(PARTITION BY new_cid ORDER BY publish_date DESC) AS num
+         |            FROM    $company_court_announcement
+         |            WHERE   ds = '${BaseUtil.getPartion(company_court_announcement, spark)}' and new_cid is not null
+         |                    and publish_date >= '${BaseUtil.atMonthsBefore(3)}'
+         |        ) a
+         |WHERE   num = 1
+         |""".stripMargin)
+
+    df.map(r => {
+      trans(r)
+    }).toDF("id", "cid", "name", "kind", "kind_code", "project", "project_code", "type",
+      "score", "total", "extraScore")
+      .createOrReplaceTempView(s"${company_court_announcement}_tmp_view")
+
+    sql(s"insert overwrite table ${company_court_announcement_score} select * from ${company_court_announcement}_tmp_view")
+
+    println(s"company ${this.getClass.getSimpleName} calc end! " + new Date().toString)
+    spark.stop()
+  }
+
+  /** Maps one deduplicated announcement row to a score tuple. */
+  def trans(r: Row) = {
+    val id = r.getAs[Long]("id")
+    val cid = r.getAs[Long]("new_cid").toString
+    val name = r.getAs[String]("new_cname")
+    val cnt1 = r.getAs[Long]("cnt1") // company on the plaintiff side (party_role = 'y')
+    val cnt2 = r.getAs[Long]("cnt2") // company on the defendant side (party_role = 'n')
+    var score = 0f
+    val total = 5f
+    val extraScore = 0f
+    var ty = ""
+    if (cnt1 == 0 && cnt2 == 0) {
+      score = 5f
+      ty = "近3个月无法院公告" // no court announcements in the past 3 months
+    } else if (cnt1 > 0 && cnt2 == 0) {
+      score = 5f
+      ty = "近3个月有法院公告,均为公诉人/原告/上诉人/申请人" // announcements exist, all plaintiff-side
+    } else if (cnt1 > cnt2) {
+      score = 3f
+      ty = "近3个月有法院公告,作为公诉人/原告/上诉人/申请人的数量大于作为被告人/被告/被上诉人/被申请人的数量" // plaintiff-side count exceeds defendant-side count
+    } else { // cnt1 <= cnt2
+      score = 0f
+      ty = "近3个月有法院公告,作为被告人/被告/被上诉人/被申请人的数量大于作为公诉人/原告/上诉人/申请人的数量" // defendant-side count at least ties the plaintiff-side count
+    }
+    val kind = "法律风险"
+    val project = "法院公告"
+    (id, cid, name, kind, DimScoreV2.newsEventMap.get(kind), project, DimScoreV2.newsEventMap.get(project), ty,
+      score, total, extraScore)
+  }
+}
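
The query reads the latest ds partition, keeps announcements published in the last three months, and applies window functions over new_cid: two conditional sums count plaintiff-side (party_role = 'y', cnt1) and defendant-side (party_role = 'n', cnt2) announcements per company, while row_number retains only the most recent row. An equivalent formulation in the DataFrame API, as an illustrative sketch (src stands for the already-filtered rows; latestWithRoleCounts is a hypothetical name):

    import org.apache.spark.sql.DataFrame
    import org.apache.spark.sql.expressions.Window
    import org.apache.spark.sql.functions.{col, row_number, sum, when}

    // One row per new_cid, with plaintiff/defendant window counts attached.
    def latestWithRoleCounts(src: DataFrame): DataFrame = {
      val byCompany = Window.partitionBy("new_cid")
      src.withColumn("cnt1", sum(when(col("party_role") === "y", 1).otherwise(0)).over(byCompany))
        .withColumn("cnt2", sum(when(col("party_role") === "n", 1).otherwise(0)).over(byCompany))
        .withColumn("num", row_number().over(byCompany.orderBy(col("publish_date").desc)))
        .where(col("num") === 1)
    }

trans then scores each company from cnt1 and cnt2 alone. The branching, distilled into a pure function with illustrative names and spot-checked against the four cases:

    // Sketch of the scoring rules in trans(); names are illustrative.
    def announcementScore(plaintiffCnt: Long, defendantCnt: Long): Float =
      if (plaintiffCnt == 0 && defendantCnt == 0) 5f // no announcements in the window
      else if (defendantCnt == 0) 5f                 // plaintiff-side only
      else if (plaintiffCnt > defendantCnt) 3f       // plaintiff-side outnumbers defendant-side
      else 0f                                        // defendant-side at least ties

    assert(announcementScore(0, 0) == 5f)
    assert(announcementScore(4, 0) == 5f)
    assert(announcementScore(3, 1) == 3f)
    assert(announcementScore(2, 2) == 0f)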

+ 19 - 0
src/main/scala/com/winhc/bigdata/spark/utils/BaseUtil.scala

@@ -1,5 +1,9 @@
 package com.winhc.bigdata.spark.utils
 
+import java.util.{Calendar, Locale}
+import org.apache.commons.lang3.time.DateFormatUtils
+import org.apache.spark.sql.SparkSession
+
 /**
  * @Author: XuJiakai
  * @Date: 2020/6/3 18:49
@@ -7,4 +11,19 @@ package com.winhc.bigdata.spark.utils
  */
 object BaseUtil {
   def isWindows: Boolean = System.getProperty("os.name").contains("Windows")
+
+  /** Latest partition value of table t (last row of SHOW PARTITIONS), e.g. "ds=20200628" -> "20200628". */
+  def getPartion(t: String, @transient spark: SparkSession) = {
+    import spark._
+    sql(s"show partitions $t").collect.toList.map(_.getString(0).split("=")(1)).last
+  }
+
+  /** The date n months before now, formatted with pattern. */
+  def atMonthsBefore(n: Int, pattern: String = "yyyy-MM-dd"): String = {
+    val c = Calendar.getInstance(Locale.CHINA)
+    c.add(Calendar.MONTH, -n)
+    DateFormatUtils.format(c.getTimeInMillis, pattern)
+  }
+
 }
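
For reference, the two helpers as the job above uses them: getPartion returns the last value from SHOW PARTITIONS (relying on the listing being sorted ascending), and atMonthsBefore yields the three-month cutoff date. Illustrative usage with hypothetical dates:

    // Assumes a live SparkSession named spark; the dates are hypothetical examples.
    val ds = BaseUtil.getPartion("ads_company_court_announcement_list", spark) // newest ds value
    val cutoff = BaseUtil.atMonthsBefore(3) // "2020-03-28" if run on 2020-06-28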