@@ -20,10 +20,11 @@ package org.apache.spark.sql.execution.datasources.jdbc
 import java.sql.{Connection, Driver, DriverManager, JDBCType, PreparedStatement, ResultSet, ResultSetMetaData, SQLException}
 import java.util.Locale
 
+import com.winhc.bigdata.spark.const.BaseConst
+
 import scala.collection.JavaConverters._
 import scala.util.Try
 import scala.util.control.NonFatal
-
 import org.apache.spark.TaskContext
 import org.apache.spark.executor.InputMetrics
 import org.apache.spark.internal.Logging
@@ -142,7 +143,11 @@ object JdbcUtils extends Logging {
       }.mkString(",")
     }
     val placeholders = rddSchema.fields.map(_ => "?").mkString(",")
-    s"INSERT INTO $table ($columns) VALUES ($placeholders)"
+    if (table.contains(BaseConst.PHOENIX_TABLE_NAME_FLAG)) {
+      s"UPSERT INTO ${table.replace("\001", "")} ($columns) VALUES ($placeholders)"
+    } else {
+      s"INSERT INTO $table ($columns) VALUES ($placeholders)"
+    }
   }
 
   /**
@@ -638,7 +643,7 @@ object JdbcUtils extends Logging {
         conn.setAutoCommit(false) // Everything in the same db transaction.
         conn.setTransactionIsolation(finalIsolationLevel)
       }
-      val stmt = conn.prepareStatement(insertStmt.replace("INSERT", "UPSERT"))
+      val stmt = conn.prepareStatement(insertStmt)
       val setters = rddSchema.fields.map(f => makeSetter(conn, dialect, f.dataType))
       val nullTypes = rddSchema.fields.map(f => getJdbcType(f.dataType, dialect).jdbcNullType)
       val numFields = rddSchema.fields.length