
Change groupId; streamline the project

许家凯 · 4 years ago · commit 0d5b27e6c1

+ 0 - 2
.gitignore

@@ -23,5 +23,3 @@ metastore_db/
 derby.log
 log4j.properties
 dependency-reduced-pom.xml
-*.log
-appstatus

+ 1 - 1
pom.xml

@@ -4,7 +4,7 @@
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
-    <groupId>com.aliyun.odps.myJava</groupId>
+    <groupId>com.winhc</groupId>
     <artifactId>Spark_Max</artifactId>
     <version>1.0</version>
 

+ 1 - 3
src/main/scala/com/winhc/bigdata/spark/jobs/CompanyNameMapping.scala

@@ -72,9 +72,7 @@ object CompanyNameMapping extends Logging {
     import spark._
     val df = sql(s"select cid,name,current_cid from $inputTable")
 
-    import com.alibaba.dcm.DnsCacheManipulator
-    DnsCacheManipulator.setDnsCache("hb-uf6as8i6h85k02092-001.hbase.rds.aliyuncs.com", "47.101.251.157")
-    val jobConf = SparkUtils.HBaseOutputJobConf(hbaseKVTable)
+    val jobConf = SparkUtils.HBaseOutputJobConf(hbaseKVTable)
     df.rdd.map(row => {
       val id = row(0).asInstanceOf[Long].toString
       val name = row(1).asInstanceOf[String]

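Note: the hunk above is truncated by the diff context. A minimal sketch of how the jobConf returned by SparkUtils.HBaseOutputJobConf is typically consumed in a job like this one; the saveAsHadoopDataset call and the "f"/"name" column names are assumptions for illustration, not part of this commit:

    import org.apache.hadoop.hbase.client.Put
    import org.apache.hadoop.hbase.io.ImmutableBytesWritable
    import org.apache.hadoop.hbase.util.Bytes

    val jobConf = SparkUtils.HBaseOutputJobConf(hbaseKVTable)
    df.rdd.map(row => {
      val id = row(0).asInstanceOf[Long].toString      // row key = cid
      val name = row(1).asInstanceOf[String]
      val put = new Put(Bytes.toBytes(id))
      // assumed column family "f" and qualifier "name"
      put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"), Bytes.toBytes(name))
      (new ImmutableBytesWritable, put)
    }).saveAsHadoopDataset(jobConf)
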
+ 8 - 22
src/main/scala/com/winhc/bigdata/spark/utils/SparkUtils.scala

@@ -11,8 +11,14 @@ object SparkUtils {
 
   def HBaseOutputJobConf(outputTable: String): JobConf = {
     val config = HBaseConfiguration.create()
-//    val zkAddress = "hb-uf6as8i6h85k02092-001.hbase.rds.aliyuncs.com"
-    val zkAddress = "hb-proxy-pub-uf6as8i6h85k02092-001.hbase.rds.aliyuncs.com"
+    var zkAddress: String = null
+    if (System.getProperty("os.name").contains("Windows")) {
+      zkAddress = "hb-proxy-pub-uf6as8i6h85k02092-001.hbase.rds.aliyuncs.com"
+      import com.alibaba.dcm.DnsCacheManipulator
+      DnsCacheManipulator.setDnsCache("hb-uf6as8i6h85k02092-001.hbase.rds.aliyuncs.com", "47.101.251.157")
+    } else {
+      zkAddress = "hb-uf6as8i6h85k02092-001.hbase.rds.aliyuncs.com"
+    }
     config.set(HConstants.ZOOKEEPER_QUORUM, zkAddress);
 
     val jobConf = new JobConf(config)
@@ -51,24 +57,4 @@ object SparkUtils {
     }
     spark.getOrCreate()
   }
-
-  def InitEnvRaw(appName: String) = {
-    val spark = SparkSession
-      .builder()
-      .appName(appName)
-      .config("spark.sql.broadcastTimeout", 20 * 60)
-      .config("spark.sql.crossJoin.enabled", true)
-      .config("odps.exec.dynamic.partition.mode", "nonstrict")
-      .config("spark.hadoop.odps.project.name", "winhc_test_dev")
-      .config("spark.hadoop.odps.access.id", "LTAI4G4n7pAW8tUbJVkkZQPD")
-      .config("spark.hadoop.odps.access.key", "uNJOBskzcDqHq1TYG3m2rebR4c1009")
-      .config("spark.sql.catalogImplementation", "odps")
-      .config("spark.hadoop.odps.end.point", "http://service.cn.maxcompute.aliyun.com/api")
-      .config("spark.hadoop.odps.runtime.end.point", "http://service.cn.maxcompute.aliyun-inc.com/api")
-
-    if (System.getProperty("os.name").contains("Windows")) {
-      spark.master("local[*]")
-    }
-    spark
-  }
 }
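
The Windows-vs-cluster switch added above, sketched with an immutable value instead of a null-initialised var; the endpoints and IP are copied from the hunk, and the behaviour is intended to be identical (a sketch, not part of the commit):

    import com.alibaba.dcm.DnsCacheManipulator

    // Windows (local dev): pin the internal HBase hostname to its public IP and
    // connect through the public proxy endpoint; on the cluster, use the internal address.
    val zkAddress: String =
      if (System.getProperty("os.name").contains("Windows")) {
        DnsCacheManipulator.setDnsCache("hb-uf6as8i6h85k02092-001.hbase.rds.aliyuncs.com", "47.101.251.157")
        "hb-proxy-pub-uf6as8i6h85k02092-001.hbase.rds.aliyuncs.com"
      } else {
        "hb-uf6as8i6h85k02092-001.hbase.rds.aliyuncs.com"
      }
    config.set(HConstants.ZOOKEEPER_QUORUM, zkAddress)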