
Merge remote-tracking branch 'origin/master'

xufei 4 years ago
parent
commit
7e888e1f26

+ 3 - 3
src/main/scala/com/winhc/bigdata/spark/jobs/CompanyNameMappingPro.scala

@@ -58,9 +58,9 @@ object CompanyNameMappingPro extends Logging {
 
 
   def main(args: Array[String]): Unit = {
-    val map = mutable.Map[String, String](
-      "spark.hadoop.odps.cupid.vpc.domain.list" -> "{\"regionId\":\"cn-shanghai\",\"vpcs\":[{\"vpcId\":\"vpc-11hby9xee\",\"zones\":[{\"urls\":[{\"domain\":\"dds-uf6ff5dfd9aef3641.mongodb.rds.aliyuncs.com\",\"port\":3717},{\"domain\":\"dds-uf6ff5dfd9aef3642.mongodb.rds.aliyuncs.com\",\"port\":3717},{\"domain\":\"hb-uf6as8i6h85k02092-001.hbase.rds.aliyuncs.com\",\"port\":2181}]}]}]}"
-    )
+
+    import com.winhc.bigdata.spark.utils.BaseUtil.getExecutorConfigOrExit
+    val map = getExecutorConfigOrExit(args)
 
     val hbaseKVTable = "company_name_kv"
     val inputTable = "new_ods_company"
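
This hunk drops a hardcoded ODPS CUPID VPC domain list in favor of reading the executor config from the program arguments. The body of getExecutorConfigOrExit is not shown in this diff, so the following is only a minimal sketch of what such a helper could look like; the object name BaseUtilSketch and the "first argument is the domain list" parsing are assumptions, while the config key comes from the removed line above.

import scala.collection.mutable

object BaseUtilSketch {
  // Sketch: build the Spark/ODPS executor config from CLI args, or exit
  // with a usage message when the required argument is missing.
  def getExecutorConfigOrExit(args: Array[String]): mutable.Map[String, String] = {
    if (args.length < 1) {
      println("usage: <odps.cupid.vpc.domain.list JSON>")
      sys.exit(-1)
    }
    // Pass the externally supplied VPC domain list through to the job config,
    // instead of hardcoding MongoDB/HBase endpoints in the source.
    mutable.Map("spark.hadoop.odps.cupid.vpc.domain.list" -> args(0))
  }
}

Keeping environment-specific endpoints out of the source means the same jar can run against different VPCs without a rebuild.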

+ 1 - 2
src/main/scala/com/winhc/bigdata/spark/test/TestSparkSql.scala

@@ -19,8 +19,7 @@ object TestSparkSql extends Logging {
     val spark = SparkUtils.InitEnv("test", map)
     import spark._
     val df1 = spark.createDataFrame(Seq((1, 2, 3))).toDF("col0", "col1", "col2")
-    val df2 = spark.createDataFrame(Seq((4, 5))).toDF("col1", "col2")
-    df1.unionAll(df2).show
+    df1.show
     spark.stop()
   }
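
The removed union could never have succeeded: union (and the deprecated unionAll) resolves columns by position and requires both sides to have the same number of columns, but df1 has three columns and df2 only two. A small sketch of the failure and a working variant, assuming the same SparkSession spark as in the test; df3 is a hypothetical three-column frame introduced here for illustration:

val df1 = spark.createDataFrame(Seq((1, 2, 3))).toDF("col0", "col1", "col2")
val df2 = spark.createDataFrame(Seq((4, 5))).toDF("col1", "col2")
// df1.unionAll(df2)  // throws AnalysisException: Union can only be performed
                      // on tables with the same number of columns

// A union with matching arity works, since columns are matched by position:
val df3 = spark.createDataFrame(Seq((0, 4, 5))).toDF("col0", "col1", "col2")
df1.union(df3).show()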