[ 
https://issues.apache.org/jira/browse/SPARK-10507?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

N Campbell updated SPARK-10507:
-------------------------------
    Description: 
TIMESTAMP - TIMESTAMP in ISO-SQL should return an interval type, which SPARK 
does not support. 

A similar expression in Hive 0.13 fails with Error: Could not create ResultSet: 
Required field 'type' is unset! Struct:TPrimitiveTypeEntry(type:null) and SPARK 
has similar "challenges". While Hive 1.2.1 has added some interval type support 
it is far from complete with respect to ISO-SQL. 

The ability to compute the period of time (years, days, weeks, hours, ...) 
between timestamps or add/subtract intervals from a timestamp is extremely 
common in business applications. 

Currently, a value expression such as select timestampcol - timestampcol from t 
will fail during execution and not at parse time. While the error thrown states 
that fact, it would be better for those value expressions to be rejected at parse 
time, along with an indication of the expression that is causing the parser error.


Operation: execute
Errors:
org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in 
stage 6214.0 failed 4 times, most recent failure: Lost task 0.3 in stage 6214.0 
(TID 21208, sandbox.hortonworks.com): java.lang.RuntimeException: Type 
TimestampType does not support numeric operations
        at scala.sys.package$.error(package.scala:27)
        at 
org.apache.spark.sql.catalyst.expressions.Subtract.numeric$lzycompute(arithmetic.scala:138)
        at 
org.apache.spark.sql.catalyst.expressions.Subtract.numeric(arithmetic.scala:136)
        at 
org.apache.spark.sql.catalyst.expressions.Subtract.eval(arithmetic.scala:150)
        at 
org.apache.spark.sql.catalyst.expressions.Alias.eval(namedExpressions.scala:113)
        at 
org.apache.spark.sql.catalyst.expressions.InterpretedMutableProjection.apply(Projection.scala:68)
        at 
org.apache.spark.sql.catalyst.expressions.InterpretedMutableProjection.apply(Projection.scala:52)
        at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
        at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
        at scala.collection.Iterator$class.foreach(Iterator.scala:727)
        at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
        at 
scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
        at 
scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
        at 
scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
        at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
        at scala.collection.AbstractIterator.to(Iterator.scala:1157)
        at 
scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
        at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
        at 
scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
        at scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
        at org.apache.spark.rdd.RDD$$anonfun$17.apply(RDD.scala:813)
        at org.apache.spark.rdd.RDD$$anonfun$17.apply(RDD.scala:813)
        at 
org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1498)

create table  if not exists TTS ( RNUM int , CTS timestamp )TERMINATED BY '\n' 
 STORED AS orc  ;


  was:
TIMESTAMP - TIMESTAMP in ISO-SQL is an interval type. Hive 0.13 fails with 
Error: Could not create ResultSet: Required field 'type' is unset! 
Struct:TPrimitiveTypeEntry(type:null) and SPARK has similar "challenges".

select cts - cts from tts 



Operation: execute
Errors:
org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in 
stage 6214.0 failed 4 times, most recent failure: Lost task 0.3 in stage 6214.0 
(TID 21208, sandbox.hortonworks.com): java.lang.RuntimeException: Type 
TimestampType does not support numeric operations
        at scala.sys.package$.error(package.scala:27)
        at 
org.apache.spark.sql.catalyst.expressions.Subtract.numeric$lzycompute(arithmetic.scala:138)
        at 
org.apache.spark.sql.catalyst.expressions.Subtract.numeric(arithmetic.scala:136)
        at 
org.apache.spark.sql.catalyst.expressions.Subtract.eval(arithmetic.scala:150)
        at 
org.apache.spark.sql.catalyst.expressions.Alias.eval(namedExpressions.scala:113)
        at 
org.apache.spark.sql.catalyst.expressions.InterpretedMutableProjection.apply(Projection.scala:68)
        at 
org.apache.spark.sql.catalyst.expressions.InterpretedMutableProjection.apply(Projection.scala:52)
        at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
        at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
        at scala.collection.Iterator$class.foreach(Iterator.scala:727)
        at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
        at 
scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
        at 
scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
        at 
scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
        at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
        at scala.collection.AbstractIterator.to(Iterator.scala:1157)
        at 
scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
        at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
        at 
scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
        at scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
        at org.apache.spark.rdd.RDD$$anonfun$17.apply(RDD.scala:813)
        at org.apache.spark.rdd.RDD$$anonfun$17.apply(RDD.scala:813)
        at 
org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1498)

create table  if not exists TTS ( RNUM int , CTS timestamp )TERMINATED BY '\n' 
 STORED AS orc  ;



> reject temporal expressions such as timestamp - timestamp at parse time 
> ------------------------------------------------------------------------
>
>                 Key: SPARK-10507
>                 URL: https://issues.apache.org/jira/browse/SPARK-10507
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>    Affects Versions: 1.3.1
>            Reporter: N Campbell
>            Priority: Minor
>
> TIMESTAMP - TIMESTAMP in ISO-SQL should return an interval type, which SPARK 
> does not support. 
> A similar expression in Hive 0.13 fails with Error: Could not create 
> ResultSet: Required field 'type' is unset! 
> Struct:TPrimitiveTypeEntry(type:null) and SPARK has similar "challenges". 
> While Hive 1.2.1 has added some interval type support it is far from complete 
> with respect to ISO-SQL. 
> The ability to compute the period of time (years, days, weeks, hours, ...) 
> between timestamps or add/subtract intervals from a timestamp is extremely 
> common in business applications. 
> Currently, a value expression such as select timestampcol - timestampcol from 
> t will fail during execution and not at parse time. While the error thrown 
> states that fact, it would be better for those value expressions to be rejected 
> at parse time, along with an indication of the expression that is causing the 
> parser error.
> Operation: execute
> Errors:
> org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in 
> stage 6214.0 failed 4 times, most recent failure: Lost task 0.3 in stage 
> 6214.0 (TID 21208, sandbox.hortonworks.com): java.lang.RuntimeException: Type 
> TimestampType does not support numeric operations
>       at scala.sys.package$.error(package.scala:27)
>       at 
> org.apache.spark.sql.catalyst.expressions.Subtract.numeric$lzycompute(arithmetic.scala:138)
>       at 
> org.apache.spark.sql.catalyst.expressions.Subtract.numeric(arithmetic.scala:136)
>       at 
> org.apache.spark.sql.catalyst.expressions.Subtract.eval(arithmetic.scala:150)
>       at 
> org.apache.spark.sql.catalyst.expressions.Alias.eval(namedExpressions.scala:113)
>       at 
> org.apache.spark.sql.catalyst.expressions.InterpretedMutableProjection.apply(Projection.scala:68)
>       at 
> org.apache.spark.sql.catalyst.expressions.InterpretedMutableProjection.apply(Projection.scala:52)
>       at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
>       at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
>       at scala.collection.Iterator$class.foreach(Iterator.scala:727)
>       at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
>       at 
> scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
>       at 
> scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
>       at 
> scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
>       at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
>       at scala.collection.AbstractIterator.to(Iterator.scala:1157)
>       at 
> scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
>       at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
>       at 
> scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
>       at scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
>       at org.apache.spark.rdd.RDD$$anonfun$17.apply(RDD.scala:813)
>       at org.apache.spark.rdd.RDD$$anonfun$17.apply(RDD.scala:813)
>       at 
> org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1498)
> create table  if not exists TTS ( RNUM int , CTS timestamp )TERMINATED BY 
> '\n' 
>  STORED AS orc  ;



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscr...@spark.apache.org
For additional commands, e-mail: issues-h...@spark.apache.org

Reply via email to