Reputation: 3
I am running a Spark job with SaveMode.Overwrite. I expected it to delete the existing data in the table and insert the new data, but it just appends the data instead. I was expecting the same behavior as when save mode Overwrite is used on a file system.
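For reference, this is the behavior I mean with the built-in file sources (just an illustrative sketch; the path is an example):

import org.apache.spark.sql.SaveMode
// df is any DataFrame; with file-based sources, Overwrite really replaces the previous output
df.write.mode(SaveMode.Overwrite).parquet("/tmp/example_output")

Here is my job: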
// Imports below assume the memsql-spark-connector 2.x package layout and log4j
import org.apache.log4j.Logger
import org.apache.spark.sql.{SaveMode, SparkSession}
import com.memsql.spark.connector._
import com.memsql.spark.connector.sql.TableIdentifier
import com.memsql.spark.connector.util.MemSQLConnectionInfo

object HiveToMemSQL {
  def main(args: Array[String]) {
    val log = Logger.getLogger(HiveToMemSQL.getClass)
    //var options = getOptions()
    //val cmdLineArgs = new CommandLineOptions().validateArguments(args, options)
    //if (cmdLineArgs != null) {
    // Get command line options values
    val query = "select * from default.students" // currently unused
    // Destination DB details (hard-coded for now instead of command line)
    val destHostName = "localhost"
    //val destUserName = cmdLineArgs.getOptionValue("destUserName")
    //val destPassword = cmdLineArgs.getOptionValue("destPassword")
    val destDBName = "tsg"
    val destTable = "ORC_POS_TEST"
    val destPort = 3308
    val destConnInfo = MemSQLConnectionInfo(destHostName, destPort, "root", "", destDBName)
    val spark = SparkSession.builder().appName("Hive To MemSQL")
      .config("maxRecordsPerBatch", "100")
      .config("spark.memsql.host", destConnInfo.dbHost)
      .config("spark.memsql.port", destConnInfo.dbPort.toString)
      .config("spark.memsql.user", destConnInfo.user)
      .config("spark.memsql.password", destConnInfo.password)
      .config("spark.memsql.defaultDatabase", destConnInfo.dbName)
      // .config("org.apache.spark.sql.SaveMode", SaveMode.Overwrite.toString())
      .config("spark.memsql.defaultSaveMode", "Overwrite")
      .master("local[*]")
      .enableHiveSupport()
      .getOrCreate()
    import spark.implicits._
    // Queries are expressed in HiveQL
    val sqlDF = spark.sql("select * from tsg.v_pos_krogus_wk_test")
    log.info("Successfully read data from source")
    sqlDF.printSchema()
    // MemSQL destination DB Master Aggregator, Port, Username and Password
    // Disabling writing to leaf nodes directly
    var saveConf = SaveToMemSQLConf(spark.memSQLConf,
      params = Map("useKeylessShardingOptimization" -> "false",
        "writeToMaster" -> "false",
        "saveMode" -> SaveMode.Overwrite.toString()))
    log.info("Save mode before: " + saveConf.saveMode)
    saveConf = saveConf.copy(saveMode = SaveMode.Overwrite)
    log.info("Save mode after: " + saveConf.saveMode)
    val tableIdent = TableIdentifier(destDBName, destTable)
    sqlDF.saveToMemSQL(tableIdent, saveConf)
    log.info("Successfully completed writing to MemSQL DB")
  }
}
Upvotes: 0
Views: 548
Reputation: 26
With SaveMode.Overwrite, the MemSQL Spark Connector writes a REPLACE statement rather than truncating the table. REPLACE works exactly like INSERT, except that if an old row in the table has the same value as a new row for a PRIMARY KEY, the old row is deleted before the new row is inserted. So if the table has no primary key, or the incoming rows don't collide with existing keys, REPLACE behaves like a plain INSERT and the data appears to be appended. See https://docs.memsql.com/sql-reference/v6.0/replace/
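If you need true overwrite semantics, one option is to truncate the destination table over plain JDBC before the connector write. A minimal sketch, assuming the connection details from the question and a MySQL-protocol JDBC driver on the classpath:

import java.sql.DriverManager

// MemSQL speaks the MySQL wire protocol, so a standard MySQL JDBC URL works
val url = "jdbc:mysql://localhost:3308/tsg"
val conn = DriverManager.getConnection(url, "root", "")
try {
  // Empty the table first, then let the connector insert the fresh rows
  conn.createStatement().execute("TRUNCATE TABLE ORC_POS_TEST")
} finally {
  conn.close()
}
// ...then call sqlDF.saveToMemSQL(tableIdent, saveConf) as in the question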
Upvotes: 1