Repository: spark
Updated Branches:
  refs/heads/master acaf2a81a -> cf9367826


[SPARK-17018][SQL] literals.sql for testing literal parsing

## What changes were proposed in this pull request?
This patch adds literals.sql for testing literal parsing end-to-end in SQL.

## How was this patch tested?
The patch itself is only about adding test cases.

Author: petermaxlee <petermax...@gmail.com>

Closes #14598 from petermaxlee/SPARK-17018-2.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/cf936782
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/cf936782
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/cf936782

Branch: refs/heads/master
Commit: cf9367826c38e5f34ae69b409f5d09c55ed1d319
Parents: acaf2a8
Author: petermaxlee <petermax...@gmail.com>
Authored: Thu Aug 11 13:55:10 2016 -0700
Committer: Reynold Xin <r...@databricks.com>
Committed: Thu Aug 11 13:55:10 2016 -0700

----------------------------------------------------------------------
 .../resources/sql-tests/inputs/literals.sql     |  92 +++++
 .../sql-tests/inputs/number-format.sql          |  16 -
 .../sql-tests/results/literals.sql.out          | 374 +++++++++++++++++++
 .../sql-tests/results/number-format.sql.out     |  42 ---
 .../apache/spark/sql/SQLQueryTestSuite.scala    |  14 +-
 5 files changed, 476 insertions(+), 62 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/cf936782/sql/core/src/test/resources/sql-tests/inputs/literals.sql
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/inputs/literals.sql 
b/sql/core/src/test/resources/sql-tests/inputs/literals.sql
new file mode 100644
index 0000000..62f0d3d
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/inputs/literals.sql
@@ -0,0 +1,92 @@
+-- Literal parsing
+
+-- null
+select null, Null, nUll;
+
+-- boolean
+select true, tRue, false, fALse;
+
+-- byte (tinyint)
+select 1Y;
+select 127Y, -128Y;
+
+-- out of range byte
+select 128Y;
+
+-- short (smallint)
+select 1S;
+select 32767S, -32768S;
+
+-- out of range short
+select 32768S;
+
+-- long (bigint)
+select 1L, 2147483648L;
+select 9223372036854775807L, -9223372036854775808L;
+
+-- out of range long
+select 9223372036854775808L;
+
+-- integral parsing
+
+-- parse int
+select 1, -1;
+
+-- parse int max and min value as int
+select 2147483647, -2147483648;
+
+-- parse long max and min value as long
+select 9223372036854775807, -9223372036854775808;
+
+-- parse as decimals (Long.MaxValue + 1, and Long.MinValue - 1)
+select 9223372036854775808, -9223372036854775809;
+
+-- out of range decimal numbers
+select 1234567890123456789012345678901234567890;
+select 1234567890123456789012345678901234567890.0;
+
+-- double
+select 1D, 1.2D, 1e10, 1.5e5, .10D, 0.10D, .1e5, .9e+2, 0.9e+2, 900e-1, 9.e+1;
+select -1D, -1.2D, -1e10, -1.5e5, -.10D, -0.10D, -.1e5;
+-- malformed double (negative doubles are covered above; '.e3' has no digits before 'e')
+select .e3;
+-- inf and -inf
+select 1E309, -1E309;
+
+-- decimal parsing
+select 0.3, -0.8, .5, -.18, 0.1111, .1111;
+
+-- super large scientific notation numbers should still be valid doubles
+select 123456789012345678901234567890123456789e10, 
123456789012345678901234567890123456789.1e10;
+
+-- string
+select "Hello Peter!", 'hello lee!';
+-- multi string
+select 'hello' 'world', 'hello' " " 'lee';
+-- single quote within double quotes
+select "hello 'peter'";
+select 'pattern%', 'no-pattern\%', 'pattern\\%', 'pattern\\\%';
+select '\'', '"', '\n', '\r', '\t', 'Z';
+-- "Hello!" in octals
+select '\110\145\154\154\157\041';
+-- "World :)" in unicode
+select '\u0057\u006F\u0072\u006C\u0064\u0020\u003A\u0029';
+
+-- date
+select dAte '2016-03-12';
+-- invalid date
+select date 'mar 11 2016';
+
+-- timestamp
+select tImEstAmp '2016-03-11 20:54:00.000';
+-- invalid timestamp
+select timestamp '2016-33-11 20:54:00.000';
+
+-- interval
+select interval 13.123456789 seconds, interval -13.123456789 second;
+select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 
millisecond, 9 microsecond;
+-- ns is not supported
+select interval 10 nanoseconds;
+
+-- unsupported data type
+select GEO '(10,-6)';

http://git-wip-us.apache.org/repos/asf/spark/blob/cf936782/sql/core/src/test/resources/sql-tests/inputs/number-format.sql
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/inputs/number-format.sql 
b/sql/core/src/test/resources/sql-tests/inputs/number-format.sql
deleted file mode 100644
index a32d068..0000000
--- a/sql/core/src/test/resources/sql-tests/inputs/number-format.sql
+++ /dev/null
@@ -1,16 +0,0 @@
--- Verifies how we parse numbers
-
--- parse as ints
-select 1, -1;
-
--- parse as longs (Int.MaxValue + 1, and Int.MinValue - 1)
-select 2147483648, -2147483649;
-
--- parse long min and max value
-select 9223372036854775807, -9223372036854775808;
-
--- parse as decimals (Long.MaxValue + 1, and Long.MinValue - 1)
-select 9223372036854775808, -9223372036854775809;
-
--- various floating point (decimal) formats
-select 0.3, -0.8, .5, -.18, 0.1111;

http://git-wip-us.apache.org/repos/asf/spark/blob/cf936782/sql/core/src/test/resources/sql-tests/results/literals.sql.out
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/results/literals.sql.out 
b/sql/core/src/test/resources/sql-tests/results/literals.sql.out
new file mode 100644
index 0000000..6d5fabd
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/results/literals.sql.out
@@ -0,0 +1,374 @@
+-- Automatically generated by SQLQueryTestSuite
+-- Number of queries: 38
+
+
+-- !query 0
+select null, Null, nUll
+-- !query 0 schema
+struct<NULL:null,NULL:null,NULL:null>
+-- !query 0 output
+NULL   NULL    NULL
+
+
+-- !query 1
+select true, tRue, false, fALse
+-- !query 1 schema
+struct<true:boolean,true:boolean,false:boolean,false:boolean>
+-- !query 1 output
+true   true    false   false
+
+
+-- !query 2
+select 1Y
+-- !query 2 schema
+struct<1:tinyint>
+-- !query 2 output
+1
+
+
+-- !query 3
+select 127Y, -128Y
+-- !query 3 schema
+struct<>
+-- !query 3 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Value out of range. Value:"128" Radix:10(line 1, pos 14)
+
+== SQL ==
+select 127Y, -128Y
+--------------^^^
+
+
+-- !query 4
+select 128Y
+-- !query 4 schema
+struct<>
+-- !query 4 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Value out of range. Value:"128" Radix:10(line 1, pos 7)
+
+== SQL ==
+select 128Y
+-------^^^
+
+
+-- !query 5
+select 1S
+-- !query 5 schema
+struct<1:smallint>
+-- !query 5 output
+1
+
+
+-- !query 6
+select 32767S, -32768S
+-- !query 6 schema
+struct<>
+-- !query 6 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Value out of range. Value:"32768" Radix:10(line 1, pos 16)
+
+== SQL ==
+select 32767S, -32768S
+----------------^^^
+
+
+-- !query 7
+select 32768S
+-- !query 7 schema
+struct<>
+-- !query 7 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Value out of range. Value:"32768" Radix:10(line 1, pos 7)
+
+== SQL ==
+select 32768S
+-------^^^
+
+
+-- !query 8
+select 1L, 2147483648L
+-- !query 8 schema
+struct<1:bigint,2147483648:bigint>
+-- !query 8 output
+1      2147483648
+
+
+-- !query 9
+select 9223372036854775807L, -9223372036854775808L
+-- !query 9 schema
+struct<>
+-- !query 9 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+For input string: "9223372036854775808"(line 1, pos 30)
+
+== SQL ==
+select 9223372036854775807L, -9223372036854775808L
+------------------------------^^^
+
+
+-- !query 10
+select 9223372036854775808L
+-- !query 10 schema
+struct<>
+-- !query 10 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+For input string: "9223372036854775808"(line 1, pos 7)
+
+== SQL ==
+select 9223372036854775808L
+-------^^^
+
+
+-- !query 11
+select 1, -1
+-- !query 11 schema
+struct<1:int,(-1):int>
+-- !query 11 output
+1      -1
+
+
+-- !query 12
+select 2147483647, -2147483648
+-- !query 12 schema
+struct<2147483647:int,(-2147483648):bigint>
+-- !query 12 output
+2147483647     -2147483648
+
+
+-- !query 13
+select 9223372036854775807, -9223372036854775808
+-- !query 13 schema
+struct<9223372036854775807:bigint,(-9223372036854775808):decimal(19,0)>
+-- !query 13 output
+9223372036854775807    -9223372036854775808
+
+
+-- !query 14
+select 9223372036854775808, -9223372036854775809
+-- !query 14 schema
+struct<9223372036854775808:decimal(19,0),(-9223372036854775809):decimal(19,0)>
+-- !query 14 output
+9223372036854775808    -9223372036854775809
+
+
+-- !query 15
+select 1234567890123456789012345678901234567890
+-- !query 15 schema
+struct<>
+-- !query 15 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+DecimalType can only support precision up to 38
+== SQL ==
+select 1234567890123456789012345678901234567890
+
+
+-- !query 16
+select 1234567890123456789012345678901234567890.0
+-- !query 16 schema
+struct<>
+-- !query 16 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+DecimalType can only support precision up to 38
+== SQL ==
+select 1234567890123456789012345678901234567890.0
+
+
+-- !query 17
+select 1D, 1.2D, 1e10, 1.5e5, .10D, 0.10D, .1e5, .9e+2, 0.9e+2, 900e-1, 9.e+1
+-- !query 17 schema
+struct<1.0:double,1.2:double,1.0E10:double,150000.0:double,0.1:double,0.1:double,10000.0:double,90.0:double,90.0:double,90.0:double,90.0:double>
+-- !query 17 output
+1.0    1.2     1.0E10  150000.0        0.1     0.1     10000.0 90.0    90.0    
90.0    90.0
+
+
+-- !query 18
+select -1D, -1.2D, -1e10, -1.5e5, -.10D, -0.10D, -.1e5
+-- !query 18 schema
+struct<(-1.0):double,(-1.2):double,(-1.0E10):double,(-150000.0):double,(-0.1):double,(-0.1):double,(-10000.0):double>
+-- !query 18 output
+-1.0   -1.2    -1.0E10 -150000.0       -0.1    -0.1    -10000.0
+
+
+-- !query 19
+select .e3
+-- !query 19 schema
+struct<>
+-- !query 19 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+no viable alternative at input 'select .'(line 1, pos 7)
+
+== SQL ==
+select .e3
+-------^^^
+
+
+-- !query 20
+select 1E309, -1E309
+-- !query 20 schema
+struct<Infinity:double,(-Infinity):double>
+-- !query 20 output
+Infinity       -Infinity
+
+
+-- !query 21
+select 0.3, -0.8, .5, -.18, 0.1111, .1111
+-- !query 21 schema
+struct<0.3:decimal(1,1),(-0.8):decimal(1,1),0.5:decimal(1,1),(-0.18):decimal(2,2),0.1111:decimal(4,4),0.1111:decimal(4,4)>
+-- !query 21 output
+0.3    -0.8    0.5     -0.18   0.1111  0.1111
+
+
+-- !query 22
+select 123456789012345678901234567890123456789e10, 
123456789012345678901234567890123456789.1e10
+-- !query 22 schema
+struct<1.2345678901234568E48:double,1.2345678901234568E48:double>
+-- !query 22 output
+1.2345678901234568E48  1.2345678901234568E48
+
+
+-- !query 23
+select "Hello Peter!", 'hello lee!'
+-- !query 23 schema
+struct<Hello Peter!:string,hello lee!:string>
+-- !query 23 output
+Hello Peter!   hello lee!
+
+
+-- !query 24
+select 'hello' 'world', 'hello' " " 'lee'
+-- !query 24 schema
+struct<helloworld:string,hello lee:string>
+-- !query 24 output
+helloworld     hello lee
+
+
+-- !query 25
+select "hello 'peter'"
+-- !query 25 schema
+struct<hello 'peter':string>
+-- !query 25 output
+hello 'peter'
+
+
+-- !query 26
+select 'pattern%', 'no-pattern\%', 'pattern\\%', 'pattern\\\%'
+-- !query 26 schema
+struct<pattern%:string,no-pattern\%:string,pattern\%:string,pattern\\%:string>
+-- !query 26 output
+pattern%       no-pattern\%    pattern\%       pattern\\%
+
+
+-- !query 27
+select '\'', '"', '\n', '\r', '\t', 'Z'
+-- !query 27 schema
+struct<':string,":string,
+:string,
:string,        :string,Z:string>
+-- !query 27 output
+'      "       
+       
                        Z
+
+
+-- !query 28
+select '\110\145\154\154\157\041'
+-- !query 28 schema
+struct<Hello!:string>
+-- !query 28 output
+Hello!
+
+
+-- !query 29
+select '\u0057\u006F\u0072\u006C\u0064\u0020\u003A\u0029'
+-- !query 29 schema
+struct<World :):string>
+-- !query 29 output
+World :)
+
+
+-- !query 30
+select dAte '2016-03-12'
+-- !query 30 schema
+struct<DATE '2016-03-12':date>
+-- !query 30 output
+2016-03-12
+
+
+-- !query 31
+select date 'mar 11 2016'
+-- !query 31 schema
+struct<>
+-- !query 31 output
+java.lang.IllegalArgumentException
+null
+
+
+-- !query 32
+select tImEstAmp '2016-03-11 20:54:00.000'
+-- !query 32 schema
+struct<TIMESTAMP('2016-03-11 20:54:00.0'):timestamp>
+-- !query 32 output
+2016-03-11 20:54:00
+
+
+-- !query 33
+select timestamp '2016-33-11 20:54:00.000'
+-- !query 33 schema
+struct<>
+-- !query 33 output
+java.lang.IllegalArgumentException
+Timestamp format must be yyyy-mm-dd hh:mm:ss[.fffffffff]
+
+
+-- !query 34
+select interval 13.123456789 seconds, interval -13.123456789 second
+-- !query 34 schema
+struct<>
+-- !query 34 output
+scala.MatchError
+(interval 13 seconds 123 milliseconds 456 microseconds,CalendarIntervalType) 
(of class scala.Tuple2)
+
+
+-- !query 35
+select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 
millisecond, 9 microsecond
+-- !query 35 schema
+struct<>
+-- !query 35 output
+scala.MatchError
+(interval 1 years 2 months 3 weeks 4 days 5 hours 6 minutes 7 seconds 8 
milliseconds,CalendarIntervalType) (of class scala.Tuple2)
+
+
+-- !query 36
+select interval 10 nanoseconds
+-- !query 36 schema
+struct<>
+-- !query 36 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+No interval can be constructed(line 1, pos 16)
+
+== SQL ==
+select interval 10 nanoseconds
+----------------^^^
+
+
+-- !query 37
+select GEO '(10,-6)'
+-- !query 37 schema
+struct<>
+-- !query 37 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Literals of type 'GEO' are currently not supported.(line 1, pos 7)
+
+== SQL ==
+select GEO '(10,-6)'
+-------^^^

http://git-wip-us.apache.org/repos/asf/spark/blob/cf936782/sql/core/src/test/resources/sql-tests/results/number-format.sql.out
----------------------------------------------------------------------
diff --git 
a/sql/core/src/test/resources/sql-tests/results/number-format.sql.out 
b/sql/core/src/test/resources/sql-tests/results/number-format.sql.out
deleted file mode 100644
index 82a1d39..0000000
--- a/sql/core/src/test/resources/sql-tests/results/number-format.sql.out
+++ /dev/null
@@ -1,42 +0,0 @@
--- Automatically generated by SQLQueryTestSuite
--- Number of queries: 5
-
-
--- !query 0
-select 1, -1
--- !query 0 schema
-struct<1:int,(-1):int>
--- !query 0 output
-1      -1
-
-
--- !query 1
-select 2147483648, -2147483649
--- !query 1 schema
-struct<2147483648:bigint,(-2147483649):bigint>
--- !query 1 output
-2147483648     -2147483649
-
-
--- !query 2
-select 9223372036854775807, -9223372036854775808
--- !query 2 schema
-struct<9223372036854775807:bigint,(-9223372036854775808):decimal(19,0)>
--- !query 2 output
-9223372036854775807    -9223372036854775808
-
-
--- !query 3
-select 9223372036854775808, -9223372036854775809
--- !query 3 schema
-struct<9223372036854775808:decimal(19,0),(-9223372036854775809):decimal(19,0)>
--- !query 3 output
-9223372036854775808    -9223372036854775809
-
-
--- !query 4
-select 0.3, -0.8, .5, -.18, 0.1111
--- !query 4 schema
-struct<0.3:decimal(1,1),(-0.8):decimal(1,1),0.5:decimal(1,1),(-0.18):decimal(2,2),0.1111:decimal(4,4)>
--- !query 4 output
-0.3    -0.8    0.5     -0.18   0.1111

http://git-wip-us.apache.org/repos/asf/spark/blob/cf936782/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
index 1022c38..069a9b6 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
@@ -143,7 +143,7 @@ class SQLQueryTestSuite extends QueryTest with 
SharedSQLContext {
       QueryOutput(
         sql = sql,
         schema = schema.catalogString,
-        output = output.mkString("\n"))
+        output = output.mkString("\n").trim)
     }
 
     if (regenerateGoldenFiles) {
@@ -180,9 +180,15 @@ class SQLQueryTestSuite extends QueryTest with 
SharedSQLContext {
     }
 
     outputs.zip(expectedOutputs).zipWithIndex.foreach { case ((output, 
expected), i) =>
-      assertResult(expected.sql, s"SQL query should match for query #$i") { 
output.sql }
-      assertResult(expected.schema, s"Schema should match for query #$i") { 
output.schema }
-      assertResult(expected.output, s"Result should match for query #$i") { 
output.output }
+      assertResult(expected.sql, s"SQL query did not match for query 
#$i\n${expected.sql}") {
+        output.sql
+      }
+      assertResult(expected.schema, s"Schema did not match for query 
#$i\n${expected.sql}") {
+        output.schema
+      }
+      assertResult(expected.output, s"Result did not match for query 
#$i\n${expected.sql}") {
+        output.output
+      }
     }
   }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to