@@ -1,11 +1,10 @@
 package com.winhc.bigdata.spark.test
 
 import java.util
-import java.util.Properties
 
-import com.winhc.bigdata.spark.utils.{PhoenixUtil, SparkUtils}
+import com.winhc.bigdata.spark.utils.SparkUtils
 import org.apache.spark.sql.Row
-import org.apache.spark.sql.types.{DataType, StringType, StructField, StructType, UserDefinedType, VarcharType}
+import org.apache.spark.sql.types.{DataType, StringType, StructField, StructType, VarcharType}
 
 import scala.collection.mutable
 
@@ -20,34 +19,22 @@ object TestSpark2PhoenixJDBC {
       "spark.hadoop.odps.spark.local.partition.amt" -> "100"
     )
     val sparkSession = SparkUtils.InitEnv("scala spark on Phoenix5.x test", map)
-    val DB_PHOENIX_URL = PhoenixUtil.getPhoenixJDBCUrl
-    // JDBC connection properties
-    val SQL_QUERY = " ( SELECT ID,NCID,CID,TITLE FROM COMPANY_BID_LIST limit 10 ) events "
-    val connProp = PhoenixUtil.getPhoenixProperties
-    val pDf = sparkSession.read.jdbc(DB_PHOENIX_URL, SQL_QUERY, connProp)
-    val sc = pDf.schema
-    println(sc)
-    pDf.printSchema()
-    pDf.show()
-    import sparkSession.implicits._
     import sparkSession._
 
-    var dt:DataType = VarcharType(255)
-//    dt = StringType
+    val dt: DataType = StringType
     val schema = StructType(Array(
-      StructField("k", dt, nullable = false),
-      StructField("s", dt, nullable = true),
-      StructField("time", dt, nullable = true)
+      StructField("ROWKEY", dt, nullable = false),
+      StructField("ID", dt, nullable = false),
+      StructField("NAME", dt, nullable = true),
+      StructField("ADDR", dt, nullable = true)
     )
     )
     val dataList = new util.ArrayList[Row]()
-    dataList.add(Row("1", "2", "null"))
+    dataList.add(Row("adsfa", "1", "2", "null"))
     val df = createDataFrame(dataList, schema)
-
-    df.write
-      .mode("append")
-      .jdbc(DB_PHOENIX_URL, "TEST_P", connProp)
+    import com.winhc.bigdata.spark.implicits.PhoenixHelper._
+    df.save2PhoenixByJDBC("PHX_TEST")
 
     sparkSession.stop()
   }
 }
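
The new save2PhoenixByJDBC call relies on an implicit helper that this diff
imports but does not show. For context, here is a minimal sketch of what such
a helper might look like, assuming it simply wraps the same append-mode
DataFrameWriter.jdbc write that the removed inline code performed. The
enclosing implicit-class name, the Phoenix JDBC URL, and the driver property
below are illustrative assumptions; the removed code resolved the URL and
connection properties via PhoenixUtil.

package com.winhc.bigdata.spark.implicits

import java.util.Properties

import org.apache.spark.sql.DataFrame

object PhoenixHelper {

  // Hypothetical sketch; the real PhoenixHelper is not part of this diff.
  implicit class DataFrameEnhancer(df: DataFrame) {

    // Append the DataFrame to a Phoenix table over JDBC, mirroring the
    // df.write.mode("append").jdbc(...) call the diff removes.
    def save2PhoenixByJDBC(tableName: String): Unit = {
      val connProp = new Properties()
      connProp.put("driver", "org.apache.phoenix.jdbc.PhoenixDriver")
      df.write
        .mode("append")
        .jdbc("jdbc:phoenix:127.0.0.1:2181", tableName, connProp)
    }
  }
}

Changing dt from VarcharType(255) to StringType also looks like the safer
choice: Spark's VarcharType is intended for parsing Hive-style DDL rather
than for building a createDataFrame schema, while StringType is fully
supported there.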