
Good news

许家凯 4 years ago
Parent
Current commit
5864d5dc50

+ 7 - 6
src/main/scala/com/winhc/bigdata/spark/const/CaseChanceConst.scala

@@ -15,6 +15,7 @@ object CaseChanceConst {
     , "company_certificate" -> "3"
     , "company_copyright_works_list" -> "3"
     , "company_copyright_reg_list" -> "3"
+    , "company_employment" -> "3"
     , "" -> "4"
   )
 
@@ -24,12 +25,12 @@ object CaseChanceConst {
     /*"" -> "3-1" //企业增资
     , "" -> "3-2" //企业新增对外投资
     , "" -> "3-3" //新增招投标
-    , "" -> "3-4" //新增招聘
-    , "" -> "3-5" //地块公示
-    , "" -> "3-6" //购地信息
-    , "" -> "3-7" //土地转让
-
-    ,*/ "company_tm" -> "3-8" //知识产权-商标
+    , */ "company_employment" -> "3-4" //新增招聘
+    /* , "" -> "3-5" //地块公示
+     , "" -> "3-6" //购地信息
+     , "" -> "3-7" //土地转让
+    */
+    , "company_tm" -> "3-8" //知识产权-商标
     , "company_patent_list" -> "3-9" //专利
     , "company_certificate" -> "3-10" //资质证书   X
     , "company_copyright_works_list" -> "3-11" //作品著作权

+ 6 - 6
src/main/scala/com/winhc/bigdata/spark/jobs/CompanyCourtAnnouncement.scala

@@ -5,22 +5,22 @@ import java.util.Collections
 
 import com.alibaba.fastjson.JSON
 import com.winhc.bigdata.spark.udf.{BaseFunc, CompanyMapping, JsonSerializable}
-import com.winhc.bigdata.spark.utils.{BaseUtil, LoggingUtils, SparkUtils}
 import com.winhc.bigdata.spark.utils.BaseUtil._
-import org.apache.spark.sql.{DataFrame, Row, SparkSession}
-
-import scala.annotation.meta.getter
-import scala.collection.mutable
-import com.winhc.bigdata.spark.utils.EsRestUtils.{getIndexResult, getRestClient}
+import com.winhc.bigdata.spark.utils.EsRestUtils.getRestClient
+import com.winhc.bigdata.spark.utils.{BaseUtil, LoggingUtils, SparkUtils}
 import org.apache.commons.lang3.StringUtils
 import org.apache.http.entity.ContentType
 import org.apache.http.nio.entity.NStringEntity
 import org.apache.http.util.EntityUtils
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.functions.col
+import org.apache.spark.sql.{Row, SparkSession}
 import org.elasticsearch.client.RestClient
 import org.json4s.jackson.Json
 
+import scala.annotation.meta.getter
+import scala.collection.mutable
+
 /**
  * @Description: Court announcement
  * @author π

+ 5 - 3
src/main/scala/com/winhc/bigdata/spark/jobs/chance/ChangeExtract.scala

@@ -157,11 +157,12 @@ object ChangeExtract {
   }
 
 
-  // winhc_eci_dev company_tm rowkey 20200707 rowkey,status_new
+  // winhc_eci_dev company_tm rowkey 20200715 status_new
   // winhc_eci_dev company_patent_list rowkey 20200715 lprs
-  // winhc_eci_dev company_certificate rowkey 20200707 lprs
+  // winhc_eci_dev company_certificate rowkey 20200707 type
   // winhc_eci_dev company_copyright_works_list rowkey 20200715 type
-  // winhc_eci_dev company_copyright_reg_list rowkey 20200707 lprs
+  // winhc_eci_dev company_copyright_reg_list rowkey 20200715 version
+  // winhc_eci_dev company_employment rowkey 20200630 source
 
 
   // winhc_eci_dev company cid 20200630 legal_entity_id,reg_location,business_scope,reg_status,reg_capital,emails,phones
@@ -173,6 +174,7 @@ object ChangeExtract {
       "spark.hadoop.odps.spark.local.partition.amt" -> "10"
     )
 
+
     val spark = SparkUtils.InitEnv("ChangeExtract", config)
 
     ChangeExtractHandle(spark, project, tableName, rowkey, inc_ds, pf.split(",")).calc
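The comment lines updated above read as sample argument sets (project, table, rowkey field, partition date, change fields) for ChangeExtract.main. The sketch below is a hypothetical illustration of how the new company_employment line maps onto the parameters forwarded to ChangeExtractHandle; the demo object itself is not part of the commit.

object ChangeExtractArgsDemo {
  def main(args: Array[String]): Unit = {
    // One of the sample lines above, split into the five values that
    // ChangeExtract.main forwards to
    // ChangeExtractHandle(spark, project, tableName, rowkey, inc_ds, pf.split(",")).calc
    val Array(project, tableName, rowkey, inc_ds, pf) =
      Array("winhc_eci_dev", "company_employment", "rowkey", "20200630", "source")
    println(s"project=$project table=$tableName rowkey=$rowkey ds=$inc_ds fields=${pf.split(",").mkString("|")}")
  }
}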

+ 2 - 2
src/main/scala/com/winhc/bigdata/spark/jobs/chance/eci_good_news.scala

@@ -131,14 +131,14 @@ object eci_good_news {
 
 
   def main(args: Array[String]): Unit = {
-    //    val Array(ds) = args
+    val Array(ds) = args
 
     val config = EsConfig.getEsConfigMap ++ mutable.Map(
       "spark.hadoop.odps.project.name" -> "winhc_eci_dev",
       "spark.hadoop.odps.spark.local.partition.amt" -> "10"
     )
     val spark = SparkUtils.InitEnv("eci_good_news", config)
-    eci_good_news_handle(spark, "20200707").company_ip()
+    eci_good_news_handle(spark, ds).company_ip()
     spark.stop()
   }
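
With the hard-coded date removed, eci_good_news.main now takes the partition date (ds) from its arguments. The snippet below is a hedged sketch of invoking it programmatically, assuming the package follows the file path shown above; the date value is only an example, not one taken from a real run.

import com.winhc.bigdata.spark.jobs.chance.eci_good_news

object EciGoodNewsRunDemo {
  def main(args: Array[String]): Unit = {
    // eci_good_news.main now expects the partition date as its only argument
    // instead of the previously hard-coded "20200707".
    eci_good_news.main(Array("20200715"))
  }
}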