Can anyone suggest how to debug this? I'm using Spark 1.3.1. The JSON itself appears to be valid (other programs can parse it), and the problem seems to lie in jsonRDD trying to infer and use a schema.

scala> sqlContext.jsonRDD(rdd).count()
java.util.NoSuchElementException: None.get
       at scala.None$.get(Option.scala:313)
       at scala.None$.get(Option.scala:311)
       at org.apache.spark.sql.json.JsonRDD$$anonfun$14.apply(JsonRDD.scala:105)
       at org.apache.spark.sql.json.JsonRDD$$anonfun$14.apply(JsonRDD.scala:101)
       at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
       at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
       at scala.collection.immutable.Map$Map1.foreach(Map.scala:109)
       at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
       at scala.collection.AbstractTraversable.map(Traversable.scala:105)
       at org.apache.spark.sql.json.JsonRDD$.org$apache$spark$sql$json$JsonRDD$$makeStruct$1(JsonRDD.scala:101)
       at org.apache.spark.sql.json.JsonRDD$$anonfun$14.apply(JsonRDD.scala:104)
       at org.apache.spark.sql.json.JsonRDD$$anonfun$14.apply(JsonRDD.scala:101)
       at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
       at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
       at scala.collection.immutable.Map$Map2.foreach(Map.scala:130)
       at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
       at scala.collection.AbstractTraversable.map(Traversable.scala:105)
       at org.apache.spark.sql.json.JsonRDD$.org$apache$spark$sql$json$JsonRDD$$makeStruct$1(JsonRDD.scala:101)
       at org.apache.spark.sql.json.JsonRDD$.createSchema(JsonRDD.scala:132)
       at org.apache.spark.sql.json.JsonRDD$.inferSchema(JsonRDD.scala:56)
       at org.apache.spark.sql.SQLContext.jsonRDD(SQLContext.scala:635)
       at org.apache.spark.sql.SQLContext.jsonRDD(SQLContext.scala:581)
       [...]
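
In case it helps frame suggestions, here is a rough sketch of how I'm thinking of isolating whichever record(s) trip up schema inference. It assumes `rdd` is an RDD[String] with one JSON document per element, as in the failing call above; the sample size and names are just placeholders:

    import scala.util.Try

    // Take a small sample and try schema inference one document at a time;
    // any record that alone makes jsonRDD blow up is a good suspect.
    val suspects = rdd.take(100).zipWithIndex.filter { case (doc, i) =>
      Try(sqlContext.jsonRDD(sc.parallelize(Seq(doc))).count()).isFailure
    }
    suspects.foreach { case (doc, i) => println(s"inference fails on record $i: $doc") }

If inference itself is the culprit, the jsonRDD overload that takes an explicit StructType should sidestep it entirely, e.g. (field names here are hypothetical):

    import org.apache.spark.sql.types._

    val schema = StructType(Seq(
      StructField("id", StringType, nullable = true),
      StructField("payload", StringType, nullable = true)))
    sqlContext.jsonRDD(rdd, schema).count()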

