jgarcia2

Reputation: 23

Error "package org.apache.spark.sql is not a value" when I try to compile an SBT package in Spark/Scala

I'm trying to compile an SBT package in Apache Spark (Scala) but I receive the following error:

[error] /home/bigdata/jgarcia2/test/src/main/scala/test.scala:37: package org.apache.spark.sql is not a value
[error] val dbschema = sql("SELECT * From table")

This is my code:

import org.apache.spark._
import org.apache.log4j.Logger
import org.apache.log4j.Level
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql._
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
object test {

  def main(args: Array[String]) {

    Logger.getLogger("org").setLevel(Level.OFF)
    Logger.getLogger("akka").setLevel(Level.OFF)

    if (args.length < 1) {
      System.err.println("Usage: test <file>")
      System.exit(1)
    }

    val sparkConf = new SparkConf().setAppName("test")
    val sc = new SparkContext(sparkConf)
    val sqlContext = new SQLContext(sc)
    val df = sqlContext.read.format("com.databricks.spark.xml").option("rowTag", "row").load(args(0))
    df.registerTempTable("table")

    val dbschema = sql("SELECT * From table")
    dbschema.printSchema()
    sc.stop()
  }
}

Can anyone help me solve this problem?

Upvotes: 2

Views: 1834

Answers (1)

stefanobaghino

Reputation: 12804

Since you imported org.apache.spark._ (which contains the sql package), the bare name sql in your code resolves to the package org.apache.spark.sql rather than to a method. A package is not a value, so the compiler reports an error when you try to invoke it as a function.
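
To see the mechanics in isolation, here is a minimal, Spark-free sketch (all names here are hypothetical): a wildcard import brings the nested package b into scope, and referring to it as if it were a value reproduces the same class of error.

package a {
  package b {
    object O
  }
}

import a._

object Demo {
  def main(args: Array[String]): Unit = {
    // `b` is now in scope as a *package* name thanks to `import a._`.
    // Uncommenting the next line fails with: "package a.b is not a value"
    // val x = b("SELECT 1")

    // Referring to a member of the package is fine:
    println(a.b.O)
  }
}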

Just write sqlContext.sql instead and the source will compile:

import org.apache.log4j.Logger
import org.apache.log4j.Level
import org.apache.spark.sql.SQLContext
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf

object Test {

  def main(args: Array[String]) {

    // Silence the verbose Spark and Akka logging
    Logger.getLogger("org").setLevel(Level.OFF)
    Logger.getLogger("akka").setLevel(Level.OFF)

    if (args.length < 1) {
      System.err.println("Usage: test <file>")
      System.exit(1)
    }

    val sparkConf = new SparkConf().setAppName("test")
    val sc = new SparkContext(sparkConf)
    val sqlContext = new SQLContext(sc)

    // Read the XML input with the spark-xml data source
    // and expose it as a temporary table
    val df = sqlContext.read.format("com.databricks.spark.xml").option("rowTag", "row").load(args(0))
    df.registerTempTable("table")

    // Call sql on the SQLContext instance, not the bare name
    val dbschema = sqlContext.sql("SELECT * From table")
    dbschema.printSchema()
    sc.stop()

  }

}
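
For the SBT packaging itself, the project also needs the Spark and spark-xml dependencies on the compile classpath. Here is a minimal build.sbt sketch, assuming a Spark 1.x setup (consistent with the SQLContext and registerTempTable API used above); the exact version numbers are assumptions and should be adjusted to your cluster:

name := "test"

scalaVersion := "2.10.6"

libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-core" % "1.6.3" % "provided",
  "org.apache.spark" %% "spark-sql"  % "1.6.3" % "provided",
  "com.databricks"   %% "spark-xml"  % "0.3.5"
)

Spark itself is marked "provided" because spark-submit supplies it at runtime; only spark-xml has to be shipped with (or alongside) your jar.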

Upvotes: 2
