Hi,

I am using spark-shell (Spark SQL 1.3.1), and the steps to reproduce the
issue are as follows:

scala> val dateDimDF = sqlContext.load("jdbc", Map(
  "url" -> "jdbc:teradata://192.168.145.58/DBS_PORT=1025,DATABASE=BENCHQADS,LOB_SUPPORT=OFF,USER=BENCHQADS,PASSWORD=abc",
  "dbtable" -> "date_dim"))

scala> dateDimDF.printSchema()

root
 |-- d_date_sk: integer (nullable = false)
 |-- d_date_id: string (nullable = false)
 |-- d_date: date (nullable = true)
 |-- d_month_seq: integer (nullable = true)
 |-- d_week_seq: integer (nullable = true)
 |-- d_quarter_seq: integer (nullable = true)
 |-- d_year: integer (nullable = true)
 |-- d_dow: integer (nullable = true)
 |-- d_moy: integer (nullable = true)
 |-- d_dom: integer (nullable = true)
 |-- d_qoy: integer (nullable = true)
 |-- d_fy_year: integer (nullable = true)
 |-- d_fy_quarter_seq: integer (nullable = true)
 |-- d_fy_week_seq: integer (nullable = true)
 |-- d_day_name: string (nullable = true)
 |-- d_quarter_name: string (nullable = true)
 |-- d_holiday: string (nullable = true)
 |-- d_weekend: string (nullable = true)
 |-- d_following_holiday: string (nullable = true)
 |-- d_first_dom: integer (nullable = true)
 |-- d_last_dom: integer (nullable = true)
 |-- d_same_day_ly: integer (nullable = true)
 |-- d_same_day_lq: integer (nullable = true)
 |-- d_current_day: string (nullable = true)
 |-- d_current_week: string (nullable = true)
 |-- d_current_month: string (nullable = true)
 |-- d_current_quarter: string (nullable = true)
 |-- d_current_year: string (nullable = true)

scala> dateDimDF.saveAsTable("date_dim_tera_save")

15/05/13 19:57:05 INFO JDBCRDD: closed connection
15/05/13 19:57:05 ERROR Executor: Exception in task 0.0 in stage 2.0 (TID 2)
java.lang.ClassCastException: java.sql.Date cannot be cast to java.lang.Integer
        at scala.runtime.BoxesRunTime.unboxToInt(BoxesRunTime.java:106)
        at org.apache.spark.sql.parquet.RowWriteSupport.writePrimitive(ParquetTableSupport.scala:215)
        at org.apache.spark.sql.parquet.RowWriteSupport.writeValue(ParquetTableSupport.scala:192)
        at org.apache.spark.sql.parquet.RowWriteSupport.write(ParquetTableSupport.scala:171)
        at org.apache.spark.sql.parquet.RowWriteSupport.write(ParquetTableSupport.scala:134)
        at parquet.hadoop.InternalParquetRecordWriter.write(InternalParquetRecordWriter.java:120)
        at parquet.hadoop.ParquetRecordWriter.write(ParquetRecordWriter.java:81)
        at parquet.hadoop.ParquetRecordWriter.write(ParquetRecordWriter.java:37)
        at org.apache.spark.sql.parquet.ParquetRelation2.org$apache$spark$sql$parquet$ParquetRelation2$$writeShard$1(newParquet.scala:671)
        at org.apache.spark.sql.parquet.ParquetRelation2$$anonfun$insert$2.apply(newParquet.scala:689)
        at org.apache.spark.sql.parquet.ParquetRelation2$$anonfun$insert$2.apply(newParquet.scala:689)
        at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
        at org.apache.spark.scheduler.Task.run(Task.scala:64)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:203)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:722)
15/05/13 19:57:05 WARN TaskSetManager: Lost task 0.0 in stage 2.0 (TID 2, localhost): java.lang.ClassCastException: java.sql.Date cannot be cast to java.lang.Integer
        [same stack trace as above]
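
A workaround that appears to avoid this first exception is to cast the
DateType column to a string before saving, so the Parquet writer never sees
a java.sql.Date. This is only a sketch against the Spark 1.3 DataFrame API
(schema, Column.cast, select), and I have not verified that the saved
values round-trip correctly:

import org.apache.spark.sql.types.DateType

// Replace every DateType column with its string form before the
// Parquet write; in date_dim only d_date is affected.
val noDateCols = dateDimDF.schema.fields.map { f =>
  if (f.dataType == DateType) dateDimDF(f.name).cast("string").as(f.name)
  else dateDimDF(f.name)
}
dateDimDF.select(noDateCols: _*).saveAsTable("date_dim_tera_save")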


scala> val itemDF = sqlContext.load("jdbc", Map(
  "url" -> "jdbc:teradata://192.168.145.58/DBS_PORT=1025,DATABASE=BENCHQADS,LOB_SUPPORT=OFF,USER=BENCHQADS,PASSWORD=abc",
  "dbtable" -> "item"))

scala> itemDF.printSchema()
root
 |-- i_item_sk: integer (nullable = false)
 |-- i_item_id: string (nullable = false)
 |-- i_rec_start_date: date (nullable = true)
 |-- i_rec_end_date: date (nullable = true)
 |-- i_item_desc: string (nullable = true)
 |-- i_current_price: decimal (nullable = true)
 |-- i_wholesale_cost: decimal (nullable = true)
 |-- i_brand_id: integer (nullable = true)
 |-- i_brand: string (nullable = true)
 |-- i_class_id: integer (nullable = true)
 |-- i_class: string (nullable = true)
 |-- i_category_id: integer (nullable = true)
 |-- i_category: string (nullable = true)
 |-- i_manufact_id: integer (nullable = true)
 |-- i_manufact: string (nullable = true)
 |-- i_size: string (nullable = true)
 |-- i_formulation: string (nullable = true)
 |-- i_color: string (nullable = true)
 |-- i_units: string (nullable = true)
 |-- i_container: string (nullable = true)
 |-- i_manager_id: integer (nullable = true)
 |-- i_product_name: string (nullable = true)

scala> itemDF.saveAsTable("item_tera_save")

java.lang.RuntimeException: Unsupported datatype DecimalType()
        at scala.sys.package$.error(package.scala:27)
        at org.apache.spark.sql.parquet.ParquetTypesConverter$$anonfun$fromDataType$2.apply(ParquetTypes.scala:372)
        at org.apache.spark.sql.parquet.ParquetTypesConverter$$anonfun$fromDataType$2.apply(ParquetTypes.scala:316)
        at scala.Option.getOrElse(Option.scala:120)
        at org.apache.spark.sql.parquet.ParquetTypesConverter$.fromDataType(ParquetTypes.scala:315)
        at org.apache.spark.sql.parquet.ParquetTypesConverter$$anonfun$4.apply(ParquetTypes.scala:395)
        at org.apache.spark.sql.parquet.ParquetTypesConverter$$anonfun$4.apply(ParquetTypes.scala:394)
        at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
        at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
        at scala.collection.immutable.List.foreach(List.scala:318)
        at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
        at scala.collection.AbstractTraversable.map(Traversable.scala:105)
        at org.apache.spark.sql.parquet.ParquetTypesConverter$.convertFromAttributes(ParquetTypes.scala:393)
        at org.apache.spark.sql.parquet.ParquetTypesConverter$.writeMetaData(ParquetTypes.scala:440)
        at org.apache.spark.sql.parquet.ParquetRelation2$MetadataCache.prepareMetadata(newParquet.scala:260)
        at org.apache.spark.sql.parquet.ParquetRelation2$MetadataCache$$anonfun$6.apply(newParquet.scala:276)
        at org.apache.spark.sql.parquet.ParquetRelation2$MetadataCache$$anonfun$6.apply(newParquet.scala:269)
        at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
        at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
        at scala.collection.immutable.List.foreach(List.scala:318)
        at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
        at scala.collection.AbstractTraversable.map(Traversable.scala:105)
        at org.apache.spark.sql.parquet.ParquetRelation2$MetadataCache.refresh(newParquet.scala:269)
        at org.apache.spark.sql.parquet.ParquetRelation2.<init>(newParquet.scala:391)
        at org.apache.spark.sql.parquet.DefaultSource.createRelation(newParquet.scala:98)
        at org.apache.spark.sql.parquet.DefaultSource.createRelation(newParquet.scala:128)
        at org.apache.spark.sql.sources.ResolvedDataSource$.apply(ddl.scala:240)
        at org.apache.spark.sql.hive.execution.CreateMetastoreDataSourceAsSelect.run(commands.scala:218)
        at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:54)
        at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:54)
        at org.apache.spark.sql.execution.ExecutedCommand.execute(commands.scala:64)
        at org.apache.spark.sql.SQLContext$QueryExecution.toRdd$lzycompute(SQLContext.scala:1099)
        at org.apache.spark.sql.SQLContext$QueryExecution.toRdd(SQLContext.scala:1099)
        at org.apache.spark.sql.DataFrame.saveAsTable(DataFrame.scala:1121)
        at org.apache.spark.sql.DataFrame.saveAsTable(DataFrame.scala:1071)
        at org.apache.spark.sql.DataFrame.saveAsTable(DataFrame.scala:1037)
        at org.apache.spark.sql.DataFrame.saveAsTable(DataFrame.scala:1015)
        at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:22)
        at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:27)
        at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:29)
        at $iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:31)
        at $iwC$$iwC$$iwC$$iwC.<init>(<console>:33)
        at $iwC$$iwC$$iwC.<init>(<console>:35)
        at $iwC$$iwC.<init>(<console>:37)
        at $iwC.<init>(<console>:39)
        at <init>(<console>:41)
        at .<init>(<console>:45)
        at .<clinit>(<console>)
        at .<init>(<console>:7)
        at .<clinit>(<console>)
        at $print(<console>)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:601)
        at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
        at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1338)
        at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
        at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:856)
        at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:901)
        at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:813)
        at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:656)
        at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:664)
        at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$loop(SparkILoop.scala:669)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:996)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:944)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:944)
        at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
        at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:944)
        at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1058)
        at org.apache.spark.repl.Main$.main(Main.scala:31)
        at org.apache.spark.repl.Main.main(Main.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:601)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:569)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:166)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:189)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:110)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
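
For this second exception, the schema above shows i_current_price and
i_wholesale_cost come back as an unparameterized decimal, which the Spark
1.3 Parquet converter rejects. A workaround sketch is to cast those columns
to a decimal with explicit precision and scale (and the two date columns to
string, since they would otherwise hit the first problem). The (18, 2)
below is my assumption, not the actual Teradata column definition:

import org.apache.spark.sql.types.{DateType, DecimalType}

// Give unparameterized decimals an explicit precision/scale and turn
// dates into strings before writing Parquet. (18, 2) is a guess; pick
// values wide enough for the real Teradata DDL.
val fixedCols = itemDF.schema.fields.map { f =>
  f.dataType match {
    case _: DecimalType => itemDF(f.name).cast(DecimalType(18, 2)).as(f.name)
    case DateType       => itemDF(f.name).cast("string").as(f.name)
    case _              => itemDF(f.name)
  }
}
itemDF.select(fixedCols: _*).saveAsTable("item_tera_save")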

Regards,
Ishwardeep



