[jira] [Commented] (SPARK-16666) Kryo encoder for custom complex classes

2016-08-07 Thread Wenchen Fan (JIRA)

[ https://issues.apache.org/jira/browse/SPARK-16666?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15411207#comment-15411207 ]

Wenchen Fan commented on SPARK-16666:
--------------------------------------

[~samehraban], the `Queryable.scala` in your stack trace doesn't exist anymore. Can you try it with the latest code?

> Kryo encoder for custom complex classes
> ----------------------------------------
>
> Key: SPARK-16666
> URL: https://issues.apache.org/jira/browse/SPARK-16666
> Project: Spark
>  Issue Type: Question
>  Components: SQL
> Affects Versions: 1.6.2
> Reporter: Sam
>
> I'm trying to create a dataset with some geo data using Spark and esri. If
> `Foo` only has a `Point` field it works, but if I add other fields besides
> the `Point`, I get an ArrayIndexOutOfBoundsException.
> {code:scala}
> import com.esri.core.geometry.Point
> import org.apache.spark.sql.{Encoder, Encoders, SQLContext}
> import org.apache.spark.{SparkConf, SparkContext}
> 
> object Main {
> 
>   case class Foo(position: Point, name: String)
> 
>   object MyEncoders {
>     implicit def PointEncoder: Encoder[Point] = Encoders.kryo[Point]
> 
>     implicit def FooEncoder: Encoder[Foo] = Encoders.kryo[Foo]
>   }
> 
>   def main(args: Array[String]): Unit = {
>     val sc = new SparkContext(new SparkConf().setAppName("app").setMaster("local"))
>     val sqlContext = new SQLContext(sc)
>     import MyEncoders.{FooEncoder, PointEncoder}
>     import sqlContext.implicits._
>     Seq(new Foo(new Point(0, 0), "bar")).toDS.show
>   }
> }
> {code}
> {noformat}
> Exception in thread "main" java.lang.ArrayIndexOutOfBoundsException: 1
>   at org.apache.spark.sql.execution.Queryable$$anonfun$formatString$1$$anonfun$apply$2.apply(Queryable.scala:71)
>   at org.apache.spark.sql.execution.Queryable$$anonfun$formatString$1$$anonfun$apply$2.apply(Queryable.scala:70)
>   at scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:772)
>   at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
>   at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
>   at scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:771)
>   at org.apache.spark.sql.execution.Queryable$$anonfun$formatString$1.apply(Queryable.scala:70)
>   at org.apache.spark.sql.execution.Queryable$$anonfun$formatString$1.apply(Queryable.scala:69)
>   at scala.collection.mutable.ArraySeq.foreach(ArraySeq.scala:73)
>   at org.apache.spark.sql.execution.Queryable$class.formatString(Queryable.scala:69)
>   at org.apache.spark.sql.Dataset.formatString(Dataset.scala:65)
>   at org.apache.spark.sql.Dataset.showString(Dataset.scala:263)
>   at org.apache.spark.sql.Dataset.show(Dataset.scala:230)
>   at org.apache.spark.sql.Dataset.show(Dataset.scala:193)
>   at org.apache.spark.sql.Dataset.show(Dataset.scala:201)
>   at Main$.main(Main.scala:24)
>   at Main.main(Main.scala)
> {noformat}






[jira] [Commented] (SPARK-16666) Kryo encoder for custom complex classes

2016-08-05 Thread Sam (JIRA)

[ https://issues.apache.org/jira/browse/SPARK-16666?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15410454#comment-15410454 ]

Sam commented on SPARK-16666:
-----------------------------

[~clockfly], in your code sample `Point` is a case class, not esri's `Point` class.
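
For comparison, below is a minimal, untested sketch of what the same check would look like on Spark 2.0 with the actual `com.esri.core.geometry.Point` (a plain Java class rather than a case class). The `EsriKryoRepro` name and the use of `SparkSession.createDataset` are illustrative choices, not taken from the original report, and it assumes the esri-geometry-api jar is on the classpath.

{code:scala}
import com.esri.core.geometry.Point
import org.apache.spark.sql.{Encoder, Encoders, SparkSession}

// Untested sketch: rerun the reported scenario on Spark 2.0 with esri's Point.
object EsriKryoRepro {

  case class Foo(position: Point, name: String)

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("app").master("local").getOrCreate()

    // Kryo encoders serialize the whole object into a single binary "value" column.
    implicit val pointEncoder: Encoder[Point] = Encoders.kryo[Point]
    implicit val fooEncoder: Encoder[Foo] = Encoders.kryo[Foo]

    // createDataset picks up fooEncoder implicitly, so sqlContext.implicits._ is not needed.
    val ds = spark.createDataset(Seq(Foo(new Point(0, 0), "bar")))
    ds.show()

    spark.stop()
  }
}
{code}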







[jira] [Commented] (SPARK-16666) Kryo encoder for custom complex classes

2016-08-04 Thread Sean Zhong (JIRA)

[ https://issues.apache.org/jira/browse/SPARK-16666?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15408855#comment-15408855 ]

Sean Zhong commented on SPARK-16666:
------------------------------------

This issue has been fixed in Spark 2.0 and on trunk. Can you use the latest code instead?
{code}
scala> import org.apache.spark.sql.{Encoder, Encoders, SQLContext}
scala> import org.apache.spark.{SparkConf, SparkContext}
scala> case class Point(a: Int, b: Int)
scala> case class Foo(position: Point, name: String)
scala> implicit val PointEncoder: Encoder[Point] = Encoders.kryo[Point]
scala> implicit val FooEncoder: Encoder[Foo] = Encoders.kryo[Foo]
scala> Seq(new Foo(new Point(0, 0), "bar")).toDS.show
+--------------------+
|               value|
+--------------------+
|[01 00 24 6C 69 6...|
+--------------------+
{code}
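
Note that `Encoders.kryo` maps the whole object into a single binary column named `value`, which is why `show` prints serialized bytes above rather than separate `position` and `name` columns.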



