|
@@ -15,9 +15,9 @@ import scala.collection.mutable
|
|
|
|
|
|
/** Arguments describing one table-to-Elasticsearch export.
  *
  * @param tn         source table name suffix; the target ODPS table is
  *                   s"winhc_ng.out_index_es_${tn}" (see target_tab below)
  * @param out_fields names of the columns to include in the exported index
  */
case class export_2_es_args(tn: String, out_fields: Seq[String])
|
|
|
|
|
|
-case class export_2_es(s: SparkSession,
|
|
|
- export_args: export_2_es_args
|
|
|
- ) extends LoggingUtils with BaseFunc {
|
|
|
+case class export_company_index_2_es(s: SparkSession,
|
|
|
+ export_args: export_2_es_args
|
|
|
+ ) extends LoggingUtils with BaseFunc {
|
|
|
@(transient@getter) val spark: SparkSession = s
|
|
|
|
|
|
private val target_tab = s"winhc_ng.out_index_es_${export_args.tn}"
|
|
@@ -125,18 +125,28 @@ case class export_2_es(s: SparkSession,
|
|
|
}
|
|
|
}
|
|
|
|
|
|
-object export_2_es {
|
|
|
+object export_company_index_2_es {
|
|
|
+ val as = Seq(
|
|
|
+ export_2_es_args("company_court_open_announcement"
|
|
|
+ , "rowkey,defendant_info,plaintiff_info,start_date,case_no,case_reason".split(","))
|
|
|
+ )
|
|
|
+
|
|
|
+
|
|
|
+ def run(spark: SparkSession, tn: String): Unit = {
|
|
|
+ export_company_index_2_es(spark, export_args = as.find(p => p.tn.equals(tn)).get)
|
|
|
+ .calc()
|
|
|
+ }
|
|
|
+
|
|
|
+
|
|
|
def main(args: Array[String]): Unit = {
|
|
|
+ val Array(tn) = args
|
|
|
val config = EsConfig.getEsConfigMap ++ mutable.Map(
|
|
|
"spark.hadoop.odps.project.name" -> "winhc_ng",
|
|
|
"spark.debug.maxToStringFields" -> "200",
|
|
|
"spark.hadoop.odps.spark.local.partition.amt" -> "10"
|
|
|
)
|
|
|
val spark = SparkUtils.InitEnv(this.getClass.getName, config)
|
|
|
-
|
|
|
- export_2_es(spark, export_args = export_2_es_args("company_court_open_announcement", "rowkey,defendant_info,plaintiff_info,start_date,case_no,case_reason".split(",")))
|
|
|
- .calc()
|
|
|
-
|
|
|
+ run(spark, tn)
|
|
|
|
|
|
spark.stop()
|
|
|
}
|