|
@@ -11,6 +11,11 @@ import org.apache.spark.sql.DataFrame
|
|
|
object PhoenixHelper {
|
|
|
|
|
|
implicit class DataFrameEnhancer(df: DataFrame) {
|
|
|
+ /**
+ * Saves this DataFrame into the given Phoenix table via a temporary view.
+ * Only usable when the catalog is the in-memory type.
+ *
+ * @param tableName name of the target Phoenix table
+ */
|
|
|
def save2Phoenix(tableName: String): Unit = {
|
|
|
val tmpTable = "tmp_" + tableName
|
|
|
df.sparkSession.sql(PhoenixUtil.getPhoenixTempView(tmpTable, tableName))
|
|
@@ -19,6 +24,11 @@ object PhoenixHelper {
|
|
|
.insertInto(tmpTable + BaseConst.PHOENIX_TABLE_NAME_FLAG)
|
|
|
}
|
|
|
|
|
|
+ /**
+ * Saves this DataFrame into the given Phoenix table over JDBC.
+ * Works with any catalog type (no in-memory restriction).
+ *
+ * @param tableName name of the target Phoenix table
+ */
|
|
|
def save2PhoenixByJDBC(tableName: String): Unit = {
|
|
|
df.write
|
|
|
.mode("append")
|