
add appName

许家凯 5 years ago
parent
current commit
b5c40d8963

+ 1 - 1
src/main/scala/com/winhc/bigdata/spark/jobs/CompanyInfoCalculator.scala

@@ -10,7 +10,7 @@ object CompanyInfoCalculator extends CompanyMapping {
   private val LOG = LogFactory.getLog(this.getClass)
 
   def main(args: Array[String]): Unit = {
-    val spark: SparkSession = SparkUtils.InitEnv
+    val spark: SparkSession = SparkUtils.InitEnv("CompanyInfoCalculator")
 
     import spark._
     LOG.info("company calc start!   ")
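For reference, a minimal sketch of how a job entry point uses the parameterized helper after this change; the job name and the query below are placeholders, not part of the commit:

```scala
import org.apache.commons.logging.LogFactory
import org.apache.spark.sql.SparkSession
import com.winhc.bigdata.spark.utils.SparkUtils

// Hypothetical job showing the new InitEnv(appName) call pattern.
object ExampleJob {
  private val LOG = LogFactory.getLog(this.getClass)

  def main(args: Array[String]): Unit = {
    // Each job now passes its own name, so the Spark UI no longer shows
    // the generic name derived inside SparkUtils for every application.
    val spark: SparkSession = SparkUtils.InitEnv("ExampleJob")

    LOG.info("example job start")
    spark.sql("SELECT 1 AS ok").show() // placeholder workload
    spark.stop()
  }
}
```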

+ 3 - 3
src/main/scala/com/winhc/bigdata/spark/utils/SparkUtils.scala

@@ -4,10 +4,10 @@ import org.apache.spark.sql.SparkSession
 
 object SparkUtils {
 
-  def InitEnv = {
+  def InitEnv(appName: String) = {
     val spark = SparkSession
       .builder()
-      .appName(this.getClass.getSimpleName)
+      .appName(appName)
       .config("spark.sql.broadcastTimeout", 20 * 60)
       .config("spark.sql.crossJoin.enabled", true)
       .config("odps.exec.dynamic.partition.mode", "nonstrict")
@@ -18,7 +18,7 @@ object SparkUtils {
       .config("spark.hadoop.odps.end.point", "http://service.cn.maxcompute.aliyun.com/api")
       .config("spark.hadoop.odps.runtime.end.point", "http://service.cn.maxcompute.aliyun-inc.com/api")
 
-    if(System.getProperty("os.name").contains("Windows")){
+    if (System.getProperty("os.name").contains("Windows")) {
       spark.master("local[*]")
     }
     spark.getOrCreate()
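Pieced together from the hunks above, a sketch of SparkUtils after this commit, assuming only the lines visible in the diff; the config entries hidden between the two hunks are elided:

```scala
package com.winhc.bigdata.spark.utils

import org.apache.spark.sql.SparkSession

object SparkUtils {

  // The application name is now supplied by the caller instead of being
  // derived from this.getClass.getSimpleName inside this helper.
  def InitEnv(appName: String) = {
    val spark = SparkSession
      .builder()
      .appName(appName)
      .config("spark.sql.broadcastTimeout", 20 * 60)
      .config("spark.sql.crossJoin.enabled", true)
      .config("odps.exec.dynamic.partition.mode", "nonstrict")
      // ... config lines between the two hunks are not shown in the diff ...
      .config("spark.hadoop.odps.end.point", "http://service.cn.maxcompute.aliyun.com/api")
      .config("spark.hadoop.odps.runtime.end.point", "http://service.cn.maxcompute.aliyun-inc.com/api")

    // When developing on Windows, fall back to a local master.
    if (System.getProperty("os.name").contains("Windows")) {
      spark.master("local[*]")
    }
    spark.getOrCreate()
  }
}
```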