(sbt) scala: import org.apache.spark.SparkContext import org.apache.spark.SparkConf import org.apache.spark.sql object SimpleApp { def main(args: Array[String]) { val conf = new SparkConf() conf.setAppName("mytest").setMaster("spark://Master:7077") val sc = new SparkContext(conf) val sqlContext = new sql.SQLContext(sc) val d=sqlContext.read.json("/home/hadoop/2015data_test/Data/Data/100808cb11e9898816ef15fcdde4e1d74cbc0b/Db6Jh2XeQ.json") sc.stop() } } ______________________________________________________________________________________________ after sbt package : ./spark-submit --class "SimpleApp" /home/hadoop/Downloads/sbt/bin/target/scala-2.10/simple-project_2.10-1.0.jar _______________________________________________________________________________________________ json fIle: { "programmers": [ { "firstName": "Brett", "lastName": "McLaughlin", "email": "aaaa" }, { "firstName": "Jason", "lastName": "Hunter", "email": "bbbb" }, { "firstName": "Elliotte", "lastName": "Harold", "email": "cccc" } ], "authors": [ { "firstName": "Isaac", "lastName": "Asimov", "genre": "sciencefiction" }, { "firstName": "Tad", "lastName": "Williams", "genre": "fantasy" }, { "firstName": "Frank", "lastName": "Peretti", "genre": "christianfiction" } ], "musicians": [ { "firstName": "Eric", "lastName": "Clapton", "instrument": "guitar" }, { "firstName": "Sergei", "lastName": "Rachmaninoff", "instrument": "piano" } ] } _______________________________________________________________________________________________ Exception in thread "main" scala.MatchError: StringType (of class org.apache.spark.sql.types.StringType$) at org.apache.spark.sql.json.InferSchema$.apply(InferSchema.scala:58) at org.apache.spark.sql.json.JSONRelation$$anonfun$schema$1.apply(JSONRelation.scala:139) _______________________________________________________________________________________________ why????
-- View this message in context: http://apache-spark-user-list.1001560.n3.nabble.com/sql-Exception-in-thread-main-scala-MatchError-StringType-tp25868.html Sent from the Apache Spark User List mailing list archive at Nabble.com. --------------------------------------------------------------------- To unsubscribe, e-mail: user-unsubscribe@spark.apache.org For additional commands, e-mail: user-help@spark.apache.org