The code for this example is very simple:

/** Minimal driver that connects to a standalone Spark cluster and runs one
  * Hive query. Entry point is provided by the `App` trait.
  *
  * NOTE(review): `App` uses delayed initialization, which can interact badly
  * with Spark serialization/closure capture — a plain `def main` is safer for
  * anything non-trivial.
  */
object SparkMain extends App with Serializable {

  // Master URL and app name are passed to the SparkContext constructor below,
  // so only runtime tuning settings are configured here.
  // loadDefaults = false: ignore system properties / spark-defaults.
  val conf = new SparkConf(false)
//    .setAppName("cc-test")
//    .setMaster("spark://hadoop-001:7077")
    //.setSparkHome("/tmp")
    .set("spark.driver.host", "192.168.23.108")  // address executors use to reach the driver
    .set("spark.cores.max", "10")
    .set("spark.executor.memory", "512M")
  val sc = new SparkContext("spark://hadoop-001:7077", "cc-test", conf)

  val hc = new HiveContext(sc)

  // FIX: the query must be a single string literal — it was previously
  // wrapped across two source lines, which does not compile.
  val input = hc.hql(
    "select * from prod_qdw.prod_sales_summary where dt = '2013-01-01' limit 10")

  // FIX: `collect` returns an Array; concatenating it directly prints the
  // array's default toString (e.g. "[Lorg...;@1a2b3c") instead of the rows.
  // mkString renders each collected row.
  println("#Result: " + input.collect().mkString("[", ", ", "]"))

  // Release cluster resources when finished.
  sc.stop()
}




--
View this message in context: 
http://apache-spark-user-list.1001560.n3.nabble.com/Running-driver-SparkContent-locally-tp11415p11418.html
Sent from the Apache Spark User List mailing list archive at Nabble.com.

---------------------------------------------------------------------
To unsubscribe, e-mail: user-unsubscribe@spark.apache.org
For additional commands, e-mail: user-help@spark.apache.org

Reply via email to