
Add environment switching

许家凯 4 years ago
commit 6f69375a53

+ 6 - 0
pom.xml

@@ -264,6 +264,12 @@
             <groupId>com.google.protobuf</groupId>
             <version>3.3.0</version>
         </dependency>
+
+        <dependency>
+            <groupId>org.yaml</groupId>
+            <artifactId>snakeyaml</artifactId>
+            <version>1.17</version>
+        </dependency>
     </dependencies>
 
     <build>
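
Note: snakeyaml is what backs the new env.yaml below; Yaml.loadAll yields one parsed object per "---"-separated document. A minimal sketch of the call this commit relies on (the object name here is illustrative):

import org.yaml.snakeyaml.Yaml

import scala.collection.JavaConverters._

object YamlLoadSketch {
  def main(args: Array[String]): Unit = {
    // loadAll returns one object (a java.util.Map for mappings) per YAML document.
    val docs = new Yaml().loadAll(getClass.getResourceAsStream("/env.yaml")).asScala
    docs.foreach(println)
  }
}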

+ 5 - 0
src/main/resources/dns-cache.properties

@@ -0,0 +1,5 @@
+hb-uf63a7d09rpl8mcvm-001.hbase.rds.aliyuncs.com=47.101.250.84
+hb-uf6m8e1nu4ivp06m5-master1-001.hbase.rds.aliyuncs.com=47.101.253.8
+hb-uf6m8e1nu4ivp06m5-master2-001.hbase.rds.aliyuncs.com=106.15.29.178
+hb-uf6m8e1nu4ivp06m5-master3-001.hbase.rds.aliyuncs.com=106.15.29.202
+hb-uf6m8e1nu4ivp06m5-core-001.hbase.rds.aliyuncs.com=106.15.29.252
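
These host-to-IP entries are read by dns-cache-manipulator: on Windows, EnvConst (below) calls DnsCacheManipulator.loadDnsCacheConfig(), which by default loads /dns-cache.properties from the classpath and pins each hostname in the JVM DNS cache, so the internal Aliyun HBase hostnames resolve from a developer machine. A minimal sketch of that call:

import com.alibaba.dcm.DnsCacheManipulator

object DnsCacheSketch {
  def main(args: Array[String]): Unit = {
    // Pins every host=ip pair from /dns-cache.properties in the JVM DNS cache
    // before any ZooKeeper/HBase connection is opened.
    DnsCacheManipulator.loadDnsCacheConfig()
  }
}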

+ 34 - 0
src/main/resources/env.yaml

@@ -0,0 +1,34 @@
+profile:
+  activate: dev
+
+---
+env:
+  name: dev-local
+  config:
+    es.nodes: es-cn-0pp0r32zf000ipovd.public.elasticsearch.aliyuncs.com
+    zk.address: hb-proxy-pub-uf63a7d09rpl8mcvm-001.hbase.rds.aliyuncs.com:2181
+    phoenix.address: http://hb-proxy-pub-uf63a7d09rpl8mcvm-001.hbase.rds.aliyuncs.com:8765
+
+---
+env:
+  name: dev-remote
+  config:
+    es.nodes: es-cn-0pp0r32zf000ipovd.elasticsearch.aliyuncs.com
+    zk.address: hb-uf63a7d09rpl8mcvm-001.hbase.rds.aliyuncs.com:2181
+    phoenix.address: http://hb-uf63a7d09rpl8mcvm-001.hbase.rds.aliyuncs.com:8765
+
+---
+env:
+  name: prod-local
+  config:
+    es.nodes: es-cn-0pp0r32zf000ipovd.public.elasticsearch.aliyuncs.com
+    zk.address: hb-proxy-pub-uf6m8e1nu4ivp06m5-master1-001.hbase.rds.aliyuncs.com:2181,hb-proxy-pub-uf6m8e1nu4ivp06m5-master2-001.hbase.rds.aliyuncs.com:2181,hb-proxy-pub-uf6m8e1nu4ivp06m5-master3-001.hbase.rds.aliyuncs.com:2181
+    phoenix.address: http://hb-uf6m8e1nu4ivp06m5-proxy-phoenix-pub.hbase.rds.aliyuncs.com:8765
+
+---
+env:
+  name: prod-remote
+  config:
+    es.nodes: es-cn-0pp0r32zf000ipovd.elasticsearch.aliyuncs.com
+    zk.address: hb-uf6m8e1nu4ivp06m5-master1-001.hbase.rds.aliyuncs.com:2181,hb-uf6m8e1nu4ivp06m5-master2-001.hbase.rds.aliyuncs.com:2181,hb-uf6m8e1nu4ivp06m5-master3-001.hbase.rds.aliyuncs.com:2181
+    phoenix.address: http://hb-uf6m8e1nu4ivp06m5-proxy-phoenix.hbase.rds.aliyuncs.com:8765
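
The first document selects the active profile; each following document describes one environment. EnvConst.getEnv() (below) appends -local or -remote to the active profile depending on the OS, so activate: dev resolves to dev-local on a Windows workstation and dev-remote on a server. A small sketch of the lookup:

import com.winhc.bigdata.spark.const.EnvConst

object EnvLookupSketch {
  def main(args: Array[String]): Unit = {
    // "dev" + Windows -> "dev-local"; "dev" + anything else -> "dev-remote".
    val env = EnvConst.getEnv()
    println(env.getValue("zk.address"))
  }
}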

+ 4 - 1
src/main/scala/com/winhc/bigdata/spark/const/BaseConst.scala

@@ -1,11 +1,14 @@
 package com.winhc.bigdata.spark.const
 
+import org.apache.hadoop.hbase.util.Bytes
+
 /**
  * @Author: XuJiakai
  * @Date: 2020/6/3 19:28
  * @Description:
  */
 object BaseConst {
-//  val PHOENIX_TABLE_NAME_FLAG = "\001"
   val DB_PHOENIX_DRIVER = "org.apache.phoenix.queryserver.client.Driver"
+
+  val F_BYTES: Array[Byte] = Bytes.toBytes("F")
 }
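
F_BYTES is presumably the HBase column family the project writes to; a hypothetical sketch of how it could be used with the HBase client Put API (row key, qualifier, and value are made up):

import com.winhc.bigdata.spark.const.BaseConst
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.util.Bytes

object PutSketch {
  def main(args: Array[String]): Unit = {
    // Stage one cell in column family "F"; executing it would need a Table handle.
    val put = new Put(Bytes.toBytes("rowkey-1"))
    put.addColumn(BaseConst.F_BYTES, Bytes.toBytes("name"), Bytes.toBytes("winhc"))
  }
}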

+ 63 - 0
src/main/scala/com/winhc/bigdata/spark/const/EnvConst.scala

@@ -1,10 +1,73 @@
 package com.winhc.bigdata.spark.const
 
+import java.util
+
+import com.winhc.bigdata.spark.utils.BaseUtil
+import org.yaml.snakeyaml.Yaml
+
+import scala.beans.BeanProperty
+import scala.collection.mutable
+
 /**
  * @Author: XuJiakai
  * @Date: 2020/6/9 14:24
  * @Description:
  */
 object EnvConst {
+  private val yaml = new Yaml().loadAll(getClass.getResourceAsStream("/env.yaml"))
+    .iterator()
+  var envName: String = null
+  var envs: List[EnvConst] = List()
+
+  import scala.collection.JavaConversions._
+  import com.winhc.bigdata.spark.utils.BaseUtil.isWindows
+
+  if (isWindows) {
+    import com.alibaba.dcm.DnsCacheManipulator
+    DnsCacheManipulator.loadDnsCacheConfig();
+  }
+
+  private var i = 0
+  while (yaml.hasNext) {
+    val o = yaml.next()
+
+    if (i == 0) {
+      val m1 = o.asInstanceOf[util.HashMap[String, util.HashMap[String, String]]]
+      envName = mapAsScalaMap(m1)("profile")("activate")
+    } else {
+      val m2 = o.asInstanceOf[util.HashMap[String, util.HashMap[String, Object]]]
+      val name = m2("env")("name").asInstanceOf[String]
+      val config = mapAsScalaMap(m2("env")("config").asInstanceOf[util.HashMap[String, String]])
+
+      envs = envs :+ EnvConst(name, config)
+    }
+    i += 1
+  }
+
+
+  def getEnv(envName: String): EnvConst = {
+    val map = envs.map(e => {
+      (e.getName, e)
+    }).toMap
+    if (!map.contains(envName)) {
+      println("env.yaml does not contain " + envName)
+      sys.exit(-88)
+    }
+    map(envName)
+  }
+
+  def getEnv(): EnvConst = {
+    val key = envName + "-" + (if (BaseUtil.isWindows) "local" else "remote")
+    getEnv(key)
+  }
+}
 
+case class EnvConst(@BeanProperty var name: String,
+                    @BeanProperty var config: mutable.Map[String, String]) extends Serializable {
+  def getValue(key: String): String = {
+    if (!config.contains(key)) {
+      throw new RuntimeException(s"$name does not contain $key. Please check env.yaml!")
+    }
+    config(key)
+  }
 }

+ 15 - 0
src/main/scala/com/winhc/bigdata/spark/test/YamlTest.scala

@@ -0,0 +1,15 @@
+package com.winhc.bigdata.spark.test
+
+import com.winhc.bigdata.spark.const.EnvConst
+
+/**
+ * @Author: XuJiakai
+ * @Date: 2020/6/29 13:51
+ * @Description:
+ */
+object YamlTest {
+  def main(args: Array[String]): Unit = {
+    val env = EnvConst.getEnv("dev-remote")
+    println(env.getValue("es.nodes"))
+  }
+}

+ 2 - 2
src/main/scala/com/winhc/bigdata/spark/utils/EsUtils.scala

@@ -1,6 +1,6 @@
 package com.winhc.bigdata.spark.utils
 
-import com.winhc.bigdata.spark.utils.BaseUtil._
+import com.winhc.bigdata.spark.const.EnvConst
 
 import scala.collection.mutable
 
@@ -15,7 +15,7 @@ object EsUtils {
     val map = mutable.Map(
       "es.nodes.wan.only" -> "true",
       "es.internal.es.version" -> "5.5.3",
-      "es.nodes" -> (if (isWindows) "es-cn-0pp0r32zf000ipovd.public.elasticsearch.aliyuncs.com" else "es-cn-0pp0r32zf000ipovd.elasticsearch.aliyuncs.com"),
+      "es.nodes" -> EnvConst.getEnv().getValue("es.nodes"),
       "es.port" -> "9200",
       "es.index.auto.create" -> "true",
       "es.net.http.auth.user" -> "elastic",

+ 2 - 15
src/main/scala/com/winhc/bigdata/spark/utils/HBaseUtils.scala

@@ -1,6 +1,6 @@
 package com.winhc.bigdata.spark.utils
 
-import com.winhc.bigdata.spark.utils.BaseUtil.isWindows
+import com.winhc.bigdata.spark.const.EnvConst
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.hbase.mapred.TableOutputFormat
 import org.apache.hadoop.hbase.{HBaseConfiguration, HConstants}
@@ -14,20 +14,7 @@ import org.apache.hadoop.mapred.JobConf
 object HBaseUtils {
   def getHbaseConf(): Configuration = {
     val config = HBaseConfiguration.create()
-    var zkAddress: String = null
-    if (isWindows) {
-      zkAddress = "hb-proxy-pub-uf63a7d09rpl8mcvm-001.hbase.rds.aliyuncs.com"
-      import com.alibaba.dcm.DnsCacheManipulator
-      DnsCacheManipulator.setDnsCache("hb-uf63a7d09rpl8mcvm-001.hbase.rds.aliyuncs.com", "47.101.250.84")
-    } else {
-      zkAddress = "hb-uf6m8e1nu4ivp06m5-master1-001.hbase.rds.aliyuncs.com,hb-uf6m8e1nu4ivp06m5-master2-001.hbase.rds.aliyuncs.com,hb-uf6m8e1nu4ivp06m5-master3-001.hbase.rds.aliyuncs.com"
-//      zkAddress = "hb-uf63a7d09rpl8mcvm-001.hbase.rds.aliyuncs.com"
-
-      //      zkAddress = "ld-uf6717qu3qh1t80z8-proxy-hbaseue.hbaseue.rds.aliyuncs.com:30020"
-      //      config.set("hbase.client.username", "root");
-      //      config.set("hbase.client.password", "root");
-      //      config.set("hbase.client.connection.impl", "org.apache.hadoop.hbase.client.AliHBaseUEClusterConnection");
-    }
+    val zkAddress: String = EnvConst.getEnv().getValue("zk.address")
     config.set(HConstants.ZOOKEEPER_QUORUM, zkAddress);
     config
   }
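
getHbaseConf() is now environment-neutral at the call site. A sketch of wiring it into a JobConf for a TableOutputFormat write (the output table name is made up):

import com.winhc.bigdata.spark.utils.HBaseUtils
import org.apache.hadoop.hbase.mapred.TableOutputFormat
import org.apache.hadoop.mapred.JobConf

object HBaseWriteSketch {
  def main(args: Array[String]): Unit = {
    // The Configuration already carries the env-aware ZooKeeper quorum.
    val jobConf = new JobConf(HBaseUtils.getHbaseConf())
    jobConf.setOutputFormat(classOf[TableOutputFormat])
    jobConf.set(TableOutputFormat.OUTPUT_TABLE, "company")
  }
}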

+ 3 - 15
src/main/scala/com/winhc/bigdata/spark/utils/PhoenixUtil.scala

@@ -2,8 +2,7 @@ package com.winhc.bigdata.spark.utils
 
 import java.util.Properties
 
-import com.winhc.bigdata.spark.const.BaseConst
-import com.winhc.bigdata.spark.utils.BaseUtil.isWindows
+import com.winhc.bigdata.spark.const.{BaseConst, EnvConst}
 
 /**
  * @Author: XuJiakai
@@ -27,12 +26,7 @@ object PhoenixUtil {
 
 
   def getPhoenixJDBCUrl: String = {
-    var queryServerAddress: String = null
-    if (isWindows) {
-      queryServerAddress = "http://hb-uf6m8e1nu4ivp06m5-proxy-phoenix-pub.hbase.rds.aliyuncs.com:8765"
-    } else {
-      queryServerAddress = "http://hb-uf6m8e1nu4ivp06m5-proxy-phoenix.hbase.rds.aliyuncs.com:8765"
-    }
+    val queryServerAddress: String = EnvConst.getEnv().getValue("phoenix.address")
     val url = "jdbc:phoenix:thin:url=" + queryServerAddress + ";serialization=PROTOBUF"
     url
   }
@@ -50,12 +44,6 @@
 
 
   def getPhoenixOptions(tableName: String): Map[String, String] = {
-    if (isWindows) {
-      import com.alibaba.dcm.DnsCacheManipulator
-      DnsCacheManipulator.setDnsCache("hb-uf6as8i6h85k02092-001.hbase.rds.aliyuncs.com", "47.101.251.157")
-      Map("table" -> tableName, "zkUrl" -> "hb-proxy-pub-uf6as8i6h85k02092-001.hbase.rds.aliyuncs.com:2181")
-    } else {
-      Map("table" -> tableName, "zkUrl" -> "hb-uf6as8i6h85k02092-001.hbase.rds.aliyuncs.com:2181")
-    }
+    Map("table" -> tableName, "zkUrl" -> EnvConst.getEnv().getValue("zk.address"))
   }
 }
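
Likewise, the Phoenix JDBC URL is now resolved from env.yaml. A minimal sketch of opening a thin-client connection with it:

import java.sql.DriverManager

import com.winhc.bigdata.spark.const.BaseConst
import com.winhc.bigdata.spark.utils.PhoenixUtil

object PhoenixConnSketch {
  def main(args: Array[String]): Unit = {
    Class.forName(BaseConst.DB_PHOENIX_DRIVER) // thin-client driver from BaseConst
    val conn = DriverManager.getConnection(PhoenixUtil.getPhoenixJDBCUrl)
    conn.close()
  }
}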