Repository: spark Updated Branches: refs/heads/master 3f4060c34 -> 3d82f6eb7
http://git-wip-us.apache.org/repos/asf/spark/blob/3d82f6eb/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/implicitTypeCasts.sql.out ---------------------------------------------------------------------- diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/implicitTypeCasts.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/implicitTypeCasts.sql.out new file mode 100644 index 0000000..44fa48e --- /dev/null +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/implicitTypeCasts.sql.out @@ -0,0 +1,354 @@ +-- Automatically generated by SQLQueryTestSuite +-- Number of queries: 44 + + +-- !query 0 +CREATE TEMPORARY VIEW t AS SELECT 1 +-- !query 0 schema +struct<> +-- !query 0 output + + + +-- !query 1 +SELECT 1 + '2' FROM t +-- !query 1 schema +struct<(CAST(1 AS DOUBLE) + CAST(2 AS DOUBLE)):double> +-- !query 1 output +3.0 + + +-- !query 2 +SELECT 1 - '2' FROM t +-- !query 2 schema +struct<(CAST(1 AS DOUBLE) - CAST(2 AS DOUBLE)):double> +-- !query 2 output +-1.0 + + +-- !query 3 +SELECT 1 * '2' FROM t +-- !query 3 schema +struct<(CAST(1 AS DOUBLE) * CAST(2 AS DOUBLE)):double> +-- !query 3 output +2.0 + + +-- !query 4 +SELECT 4 / '2' FROM t +-- !query 4 schema +struct<(CAST(4 AS DOUBLE) / CAST(CAST(2 AS DOUBLE) AS DOUBLE)):double> +-- !query 4 output +2.0 + + +-- !query 5 +SELECT 1.1 + '2' FROM t +-- !query 5 schema +struct<(CAST(1.1 AS DOUBLE) + CAST(2 AS DOUBLE)):double> +-- !query 5 output +3.1 + + +-- !query 6 +SELECT 1.1 - '2' FROM t +-- !query 6 schema +struct<(CAST(1.1 AS DOUBLE) - CAST(2 AS DOUBLE)):double> +-- !query 6 output +-0.8999999999999999 + + +-- !query 7 +SELECT 1.1 * '2' FROM t +-- !query 7 schema +struct<(CAST(1.1 AS DOUBLE) * CAST(2 AS DOUBLE)):double> +-- !query 7 output +2.2 + + +-- !query 8 +SELECT 4.4 / '2' FROM t +-- !query 8 schema +struct<(CAST(4.4 AS DOUBLE) / CAST(2 AS DOUBLE)):double> +-- !query 8 output +2.2 + + +-- !query 9 +SELECT 1.1 + '2.2' FROM t 
+-- !query 9 schema +struct<(CAST(1.1 AS DOUBLE) + CAST(2.2 AS DOUBLE)):double> +-- !query 9 output +3.3000000000000003 + + +-- !query 10 +SELECT 1.1 - '2.2' FROM t +-- !query 10 schema +struct<(CAST(1.1 AS DOUBLE) - CAST(2.2 AS DOUBLE)):double> +-- !query 10 output +-1.1 + + +-- !query 11 +SELECT 1.1 * '2.2' FROM t +-- !query 11 schema +struct<(CAST(1.1 AS DOUBLE) * CAST(2.2 AS DOUBLE)):double> +-- !query 11 output +2.4200000000000004 + + +-- !query 12 +SELECT 4.4 / '2.2' FROM t +-- !query 12 schema +struct<(CAST(4.4 AS DOUBLE) / CAST(2.2 AS DOUBLE)):double> +-- !query 12 output +2.0 + + +-- !query 13 +SELECT '$' || cast(1 as smallint) || '$' FROM t +-- !query 13 schema +struct<concat(concat($, CAST(CAST(1 AS SMALLINT) AS STRING)), $):string> +-- !query 13 output +$1$ + + +-- !query 14 +SELECT '$' || 1 || '$' FROM t +-- !query 14 schema +struct<concat(concat($, CAST(1 AS STRING)), $):string> +-- !query 14 output +$1$ + + +-- !query 15 +SELECT '$' || cast(1 as bigint) || '$' FROM t +-- !query 15 schema +struct<concat(concat($, CAST(CAST(1 AS BIGINT) AS STRING)), $):string> +-- !query 15 output +$1$ + + +-- !query 16 +SELECT '$' || cast(1.1 as float) || '$' FROM t +-- !query 16 schema +struct<concat(concat($, CAST(CAST(1.1 AS FLOAT) AS STRING)), $):string> +-- !query 16 output +$1.1$ + + +-- !query 17 +SELECT '$' || cast(1.1 as double) || '$' FROM t +-- !query 17 schema +struct<concat(concat($, CAST(CAST(1.1 AS DOUBLE) AS STRING)), $):string> +-- !query 17 output +$1.1$ + + +-- !query 18 +SELECT '$' || 1.1 || '$' FROM t +-- !query 18 schema +struct<concat(concat($, CAST(1.1 AS STRING)), $):string> +-- !query 18 output +$1.1$ + + +-- !query 19 +SELECT '$' || cast(1.1 as decimal(8,3)) || '$' FROM t +-- !query 19 schema +struct<concat(concat($, CAST(CAST(1.1 AS DECIMAL(8,3)) AS STRING)), $):string> +-- !query 19 output +$1.100$ + + +-- !query 20 +SELECT '$' || 'abcd' || '$' FROM t +-- !query 20 schema +struct<concat(concat($, abcd), $):string> +-- !query 20 output 
+$abcd$ + + +-- !query 21 +SELECT '$' || date('1996-09-09') || '$' FROM t +-- !query 21 schema +struct<concat(concat($, CAST(CAST(1996-09-09 AS DATE) AS STRING)), $):string> +-- !query 21 output +$1996-09-09$ + + +-- !query 22 +SELECT '$' || timestamp('1996-09-09 10:11:12.4' )|| '$' FROM t +-- !query 22 schema +struct<concat(concat($, CAST(CAST(1996-09-09 10:11:12.4 AS TIMESTAMP) AS STRING)), $):string> +-- !query 22 output +$1996-09-09 10:11:12.4$ + + +-- !query 23 +SELECT length(cast(1 as smallint)) FROM t +-- !query 23 schema +struct<length(CAST(CAST(1 AS SMALLINT) AS STRING)):int> +-- !query 23 output +1 + + +-- !query 24 +SELECT length(cast(1 as int)) FROM t +-- !query 24 schema +struct<length(CAST(CAST(1 AS INT) AS STRING)):int> +-- !query 24 output +1 + + +-- !query 25 +SELECT length(cast(1 as bigint)) FROM t +-- !query 25 schema +struct<length(CAST(CAST(1 AS BIGINT) AS STRING)):int> +-- !query 25 output +1 + + +-- !query 26 +SELECT length(cast(1.1 as float)) FROM t +-- !query 26 schema +struct<length(CAST(CAST(1.1 AS FLOAT) AS STRING)):int> +-- !query 26 output +3 + + +-- !query 27 +SELECT length(cast(1.1 as double)) FROM t +-- !query 27 schema +struct<length(CAST(CAST(1.1 AS DOUBLE) AS STRING)):int> +-- !query 27 output +3 + + +-- !query 28 +SELECT length(1.1) FROM t +-- !query 28 schema +struct<length(CAST(1.1 AS STRING)):int> +-- !query 28 output +3 + + +-- !query 29 +SELECT length(cast(1.1 as decimal(8,3))) FROM t +-- !query 29 schema +struct<length(CAST(CAST(1.1 AS DECIMAL(8,3)) AS STRING)):int> +-- !query 29 output +5 + + +-- !query 30 +SELECT length('four') FROM t +-- !query 30 schema +struct<length(four):int> +-- !query 30 output +4 + + +-- !query 31 +SELECT length(date('1996-09-10')) FROM t +-- !query 31 schema +struct<length(CAST(CAST(1996-09-10 AS DATE) AS STRING)):int> +-- !query 31 output +10 + + +-- !query 32 +SELECT length(timestamp('1996-09-10 10:11:12.4')) FROM t +-- !query 32 schema +struct<length(CAST(CAST(1996-09-10 10:11:12.4 AS 
TIMESTAMP) AS STRING)):int> +-- !query 32 output +21 + + +-- !query 33 +SELECT year( '1996-01-10') FROM t +-- !query 33 schema +struct<year(CAST(1996-01-10 AS DATE)):int> +-- !query 33 output +1996 + + +-- !query 34 +SELECT month( '1996-01-10') FROM t +-- !query 34 schema +struct<month(CAST(1996-01-10 AS DATE)):int> +-- !query 34 output +1 + + +-- !query 35 +SELECT day( '1996-01-10') FROM t +-- !query 35 schema +struct<dayofmonth(CAST(1996-01-10 AS DATE)):int> +-- !query 35 output +10 + + +-- !query 36 +SELECT hour( '10:11:12') FROM t +-- !query 36 schema +struct<hour(CAST(10:11:12 AS TIMESTAMP)):int> +-- !query 36 output +10 + + +-- !query 37 +SELECT minute( '10:11:12') FROM t +-- !query 37 schema +struct<minute(CAST(10:11:12 AS TIMESTAMP)):int> +-- !query 37 output +11 + + +-- !query 38 +SELECT second( '10:11:12') FROM t +-- !query 38 schema +struct<second(CAST(10:11:12 AS TIMESTAMP)):int> +-- !query 38 output +12 + + +-- !query 39 +select 1 like '%' FROM t +-- !query 39 schema +struct<CAST(1 AS STRING) LIKE %:boolean> +-- !query 39 output +true + + +-- !query 40 +select date('1996-09-10') like '19%' FROM t +-- !query 40 schema +struct<CAST(CAST(1996-09-10 AS DATE) AS STRING) LIKE 19%:boolean> +-- !query 40 output +true + + +-- !query 41 +select '1' like 1 FROM t +-- !query 41 schema +struct<1 LIKE CAST(1 AS STRING):boolean> +-- !query 41 output +true + + +-- !query 42 +select '1 ' like 1 FROM t +-- !query 42 schema +struct<1 LIKE CAST(1 AS STRING):boolean> +-- !query 42 output +false + + +-- !query 43 +select '1996-09-10' like date('1996-09-10') FROM t +-- !query 43 schema +struct<1996-09-10 LIKE CAST(CAST(1996-09-10 AS DATE) AS STRING):boolean> +-- !query 43 output +true http://git-wip-us.apache.org/repos/asf/spark/blob/3d82f6eb/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala ---------------------------------------------------------------------- diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala index cac3e12..e3901af 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala @@ -300,7 +300,7 @@ class SQLQueryTestSuite extends QueryTest with SharedSQLContext { Locale.setDefault(originalLocale) // For debugging dump some statistics about how much time was spent in various optimizer rules - logInfo(RuleExecutor.dumpTimeSpent()) + logWarning(RuleExecutor.dumpTimeSpent()) } finally { super.afterAll() } http://git-wip-us.apache.org/repos/asf/spark/blob/3d82f6eb/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQuerySuite.scala ---------------------------------------------------------------------- diff --git a/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQuerySuite.scala index dbea036..a58000d 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQuerySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQuerySuite.scala @@ -41,7 +41,7 @@ class TPCDSQuerySuite extends QueryTest with SharedSQLContext with BeforeAndAfte protected override def afterAll(): Unit = { try { // For debugging dump some statistics about how much time was spent in various optimizer rules - logInfo(RuleExecutor.dumpTimeSpent()) + logWarning(RuleExecutor.dumpTimeSpent()) spark.sessionState.catalog.reset() } finally { super.afterAll() http://git-wip-us.apache.org/repos/asf/spark/blob/3d82f6eb/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala ---------------------------------------------------------------------- diff --git a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala index def70a5..45791c6 100644 --- 
a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala +++ b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala @@ -76,7 +76,7 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter { TestHive.setConf(SQLConf.SESSION_LOCAL_TIMEZONE, originalSessionLocalTimeZone) // For debugging dump some statistics about how much time was spent in various optimizer rules - logInfo(RuleExecutor.dumpTimeSpent()) + logWarning(RuleExecutor.dumpTimeSpent()) } finally { super.afterAll() } --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org For additional commands, e-mail: commits-help@spark.apache.org