I am working on a data science task and trying to convert a sequence of case class instances into a DataFrame. The conversion itself is a single line of code, but it throws an exception. I tried to fix the exception by defining the case classes outside the main method, yet even after that I get the same exception. Can anyone help?
package sparkWCExample.spWCExample

import org.apache.log4j.Level
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.{DataFrame, Dataset, Encoders, Row, SparkSession}
import org.apache.spark.sql.functions._

// Case classes for our domain, defined outside the main method
case class Department(id: String, name: String)
case class Person(name: String, age: Long)

object DatasetExample {
  def main(args: Array[String]): Unit = {
    println("Start now")

    val conf = new SparkConf().setAppName("Spark Scala WordCount Example").setMaster("local[1]")
    val spark = SparkSession.builder().config(conf).appName("CsvExample").master("local").getOrCreate()
    val sqlContext = new org.apache.spark.sql.SQLContext(spark.sparkContext)
    import sqlContext.implicits._
    import spark.implicits._

    //val df = spark.read.options(Map("inferSchema"->"true","delimiter"->",","header"->"true")).csv("C:\\Sankha\\Study\\data\\salary.csv")

    // Create the Departments
    val department1 = Department("123456", "Computer Science")
    val department2 = Department("789012", "Mechanical Engineering")
    val department3 = Department("345678", "Theater and Drama")
    val department4 = Department("901234", "Indoor Recreation")

    val caseClassDS = Seq(Person("Andy", 32)).toDS()
    val df = Seq(department1, department2, department3, department4).toDF()
  }
}
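
For reference, this is the stripped-down version I would expect to work: a single SparkSession with only spark.implicits._ imported (the SQLContext and its implicits are left out just to keep the example minimal, and the object name MinimalToDF is arbitrary):

package sparkWCExample.spWCExample

import org.apache.spark.sql.SparkSession

// Case class defined at the top level, outside any method or object
case class Department(id: String, name: String)

object MinimalToDF {
  def main(args: Array[String]): Unit = {
    // Single SparkSession; only its implicits are imported
    val spark = SparkSession.builder()
      .appName("MinimalToDF")
      .master("local[1]")
      .getOrCreate()
    import spark.implicits._

    // The one-line conversion in question: Seq of case class instances -> DataFrame
    val df = Seq(
      Department("123456", "Computer Science"),
      Department("789012", "Mechanical Engineering")
    ).toDF()

    df.show()
    spark.stop()
  }
}

Is there something in my original setup (for example, the extra SQLContext or the second implicits import) that could cause the exception, or is the problem elsewhere?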