import $ivy.`org.apache.spark::spark-sql:3.0.1` // Or use any other 3.x version here (keep all Spark artifacts on the same version)
import $ivy.`org.apache.spark::spark-hive:3.0.1`
import $ivy.`sh.almond::almond-spark:0.6.0`
import $ivy.`org.apache.spark::spark-yarn:3.0.1`

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.hive._
import org.apache.spark.sql._
import org.apache.spark.rdd._

import org.apache.log4j.{Level, Logger}
// Silence Spark's chatty log4j output ("org.*" loggers) in the notebook.
val orgLogger = Logger.getLogger("org")
orgLogger.setLevel(Level.OFF)



// Build (or reuse) the Spark session for this notebook, running on YARN.
// NotebookSparkSession comes from almond-spark (via the org.apache.spark.sql
// wildcard import above) and integrates Spark job progress with the notebook UI.
// Cluster sizing matches the original: 4 executors x 2 cores each.
// NOTE(review): almond-spark 0.6.0 was built against Spark 2.x — confirm it is
// compatible with the Spark 3.0.1 artifacts declared above.
val spark = NotebookSparkSession
  .builder()
  .master("yarn")
  .appName("Scala Spark Notebook")
  .config("spark.executor.instances", "4")
  .config("spark.executor.cores", "2")
  .getOrCreate()

// Convenience handle to the underlying SparkContext for RDD-level work.
val sc = spark.sparkContext