abi_pat

Reputation: 602

Scala - Unable to write Scala object into Cassandra

I am trying to write Scala case class objects into Cassandra using Spark, but I get an exception when I run the code. I suspect I am failing to map my case class objects to my Cassandra rows. My Scala code looks like this:

CassandraPerformerClass.scala

import org.apache.spark.{SparkConf, SparkContext}
import com.datastax.spark.connector._
import com.datastax.spark.connector.cql.CassandraConnector

object CassandraPerformerClass extends App
{
  override def main(args: Array[String])
  {
    val keyspace = "scalakeys1"
    val tablename = "demotable1"

    val conf = new SparkConf()
      .setAppName("CassandraDemo")
      .setMaster("spark://ct-0015:7077")
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)
    conf.set("spark.cassandra.connection.host", "192.168.50.103")
    conf.set("spark.cassandra.connection.native.port", "9041")
    conf.set("spark.cassandra.connection.rpc.port", "9160")

    val sc = new SparkContext(conf)

    // (Re)create the keyspace and table before writing
    CassandraConnector(conf).withSessionDo { session =>
      session.execute("DROP KEYSPACE IF EXISTS " + keyspace + ";")
      session.execute("CREATE KEYSPACE " + keyspace + " WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 3};")
      session.execute("CREATE TABLE " + keyspace + "." + tablename + " (keyval bigint, rangef bigint, arrayval text, PRIMARY KEY (rangef, keyval));")
      session.execute("CREATE INDEX index_11 ON " + keyspace + "." + tablename + " (keyval);")
    }

    val data = Seq(new Data(1, 10, "string1"), new Data(2, 20, "string2"))
    val collection = sc.parallelize(data)
    collection.saveToCassandra(keyspace, tablename)
  }

  case class Data(kv: Long, rf: Long, av: String) extends Serializable
  {
    private var keyval: Long = kv
    private var rangef: Long = rf
    private var arrayval: String = av

    def setKeyval(kv: Long) { keyval = kv }
    def setRangef(rf: Long) { rangef = rf }
    def setArrayval(av: String) { arrayval = av }

    def getKeyval = keyval
    def getRangef = rangef
    def getArrayval = arrayval

    override def toString = keyval + "," + rangef + "," + arrayval
  }
}

Exception

Exception in thread "main" java.lang.IllegalArgumentException: Some primary key columns are missing in RDD or have not been selected: rangef, keyval
    at com.datastax.spark.connector.writer.DefaultRowWriter.checkMissingPrimaryKeyColumns(DefaultRowWriter.scala:44)
    at com.datastax.spark.connector.writer.DefaultRowWriter.<init>(DefaultRowWriter.scala:71)
    at com.datastax.spark.connector.writer.DefaultRowWriter$$anon$2.rowWriter(DefaultRowWriter.scala:109)
    at com.datastax.spark.connector.writer.DefaultRowWriter$$anon$2.rowWriter(DefaultRowWriter.scala:107)
    at com.datastax.spark.connector.writer.TableWriter$.apply(TableWriter.scala:170)
    at com.datastax.spark.connector.RDDFunctions.saveToCassandra(RDDFunctions.scala:23)
    at com.cleartrail.spark.scala.cassandra.poc.CassandraPerformerClass$.main(CassandraPerformerClass.scala:33)
    at com.cleartrail.spark.scala.cassandra.poc.CassandraPerformerClass.main(CassandraPerformerClass.scala)

Please tell me how to map my case class object to a Cassandra row.

Upvotes: 2

Views: 1995

Answers (1)

maasg

Reputation: 37435

The Scala-based connector for Spark does not expect a Java-bean-style class with getters and setters for its fields. (That is bad practice anyway: case classes are an immutable alternative to bean-like data containers, with generated accessors for the fields and no mutators.)
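For illustration, a minimal sketch (using a hypothetical Point class, not part of the question) of what a case class already gives you, which is why hand-written getters and setters are unnecessary:

case class Point(x: Int, y: Int)

val p = Point(1, 2)
p.x                   // fields are readable through generated accessors; no getX needed
val q = p.copy(y = 5) // "updates" produce a new instance instead of mutating p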

Creating a case class whose field names and types match the columns of the Cassandra table will just work:

case class Data(keyval: Long, rangef: Long, arrayval: String) extends Serializable
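With the field names matching the columns keyval, rangef, and arrayval, the connector's row writer can find the primary key columns and the save succeeds. A minimal sketch of the corrected write, reusing the keyspace and table names from the question and assuming sc is the SparkContext set up there:

import com.datastax.spark.connector._

case class Data(keyval: Long, rangef: Long, arrayval: String) extends Serializable

val data = Seq(Data(1, 10, "string1"), Data(2, 20, "string2"))
sc.parallelize(data).saveToCassandra("scalakeys1", "demotable1")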

Upvotes: 4
