@@ -1,45 +1,11 @@
 package com.winhc.bigdata.spark.utils
 
 import com.winhc.bigdata.spark.utils.BaseUtil._
-import org.apache.hadoop.hbase.mapred.TableOutputFormat
-import org.apache.hadoop.hbase.{HBaseConfiguration, HConstants}
-import org.apache.hadoop.mapred.JobConf
 import org.apache.spark.sql.SparkSession
 
 import scala.collection.mutable
 
 object SparkUtils {
-
-
-
-  def PhoenixOptions(tableName: String): Map[String, String] = {
-    if (isWindows) {
-      import com.alibaba.dcm.DnsCacheManipulator
-      DnsCacheManipulator.setDnsCache("hb-uf6as8i6h85k02092-001.hbase.rds.aliyuncs.com", "47.101.251.157")
-      Map("table" -> tableName, "zkUrl" -> "hb-proxy-pub-uf6as8i6h85k02092-001.hbase.rds.aliyuncs.com:2181")
-    } else {
-      Map("table" -> tableName, "zkUrl" -> "hb-uf6as8i6h85k02092-001.hbase.rds.aliyuncs.com:2181")
-    }
-  }
-
-  def HBaseOutputJobConf(outputTable: String): JobConf = {
-    val config = HBaseConfiguration.create()
-    var zkAddress: String = null
-    if (isWindows) {
-      zkAddress = "hb-proxy-pub-uf6as8i6h85k02092-001.hbase.rds.aliyuncs.com"
-      import com.alibaba.dcm.DnsCacheManipulator
-      DnsCacheManipulator.setDnsCache("hb-uf6as8i6h85k02092-001.hbase.rds.aliyuncs.com", "47.101.251.157")
-    } else {
-      zkAddress = "hb-uf6as8i6h85k02092-001.hbase.rds.aliyuncs.com"
-    }
-    config.set(HConstants.ZOOKEEPER_QUORUM, zkAddress);
-
-    val jobConf = new JobConf(config)
-    jobConf.setOutputFormat(classOf[TableOutputFormat])
-    jobConf.set(TableOutputFormat.OUTPUT_TABLE, outputTable)
-    jobConf
-  }
-
   def InitEnv(appName: String): SparkSession = {
     InitEnv(appName, null)
   }
@@ -71,5 +37,4 @@ object SparkUtils {
     }
     spark.getOrCreate()
   }
-
 }
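For context, the removed helpers were consumed through standard Spark APIs. The sketch below is not part of this change; it assumes the pre-change PhoenixOptions and HBaseOutputJobConf signatures shown above, and the object name, table names, column family "F", qualifier "C", and the assumption that the first two DataFrame columns are strings are all placeholders for illustration only.

import com.winhc.bigdata.spark.utils.SparkUtils
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.util.Bytes

// Hypothetical usage sketch, not repository code.
object RemovedHelpersSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkUtils.InitEnv("removed-helpers-sketch")

    // Phoenix read: the phoenix-spark connector consumes exactly the
    // "table" / "zkUrl" map that PhoenixOptions returned.
    val df = spark.read
      .format("org.apache.phoenix.spark")
      .options(SparkUtils.PhoenixOptions("SOME_PHOENIX_TABLE")) // placeholder table
      .load()

    // HBase write: the TableOutputFormat JobConf that HBaseOutputJobConf
    // returned is consumed by saveAsHadoopDataset on a (rowkey, Put) pair RDD.
    val jobConf = SparkUtils.HBaseOutputJobConf("SOME_HBASE_TABLE") // placeholder table
    df.rdd
      .map { row =>
        val put = new Put(Bytes.toBytes(row.getString(0)))
        put.addColumn(Bytes.toBytes("F"), Bytes.toBytes("C"), Bytes.toBytes(row.getString(1)))
        (new ImmutableBytesWritable(), put)
      }
      .saveAsHadoopDataset(jobConf)

    spark.stop()
  }
}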