Repository: spark
Updated Branches:
  refs/heads/master 69bb7715f -> 7463a88be


[SPARK-20690][SQL] Subqueries in FROM should have alias names

## What changes were proposed in this pull request?

We add missing attributes to Filter in the Analyzer, but we shouldn't do so 
across subquery boundaries, as in this query:

    select 1 from  (select 1 from onerow t1 LIMIT 1) where  t1.c1=1

This query works in the current codebase. However, the outer WHERE clause 
shouldn't be able to refer to the `t1.c1` attribute.

The root cause is that we previously allowed subqueries in FROM to have no 
alias names. This is confusing and isn't supported by various databases such 
as MySQL, Postgres, and Oracle, so we shouldn't support it either.

## How was this patch tested?

Jenkins tests.

Please review http://spark.apache.org/contributing.html before opening a pull 
request.

Author: Liang-Chi Hsieh <vii...@gmail.com>

Closes #17935 from viirya/SPARK-20690.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/7463a88b
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/7463a88b
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/7463a88b

Branch: refs/heads/master
Commit: 7463a88be6eecf14ba0a87048a975a06b03b928b
Parents: 69bb771
Author: Liang-Chi Hsieh <vii...@gmail.com>
Authored: Wed May 17 12:57:35 2017 +0800
Committer: Wenchen Fan <wenc...@databricks.com>
Committed: Wed May 17 12:57:35 2017 +0800

----------------------------------------------------------------------
 .../apache/spark/sql/catalyst/parser/SqlBase.g4   |  2 +-
 .../spark/sql/catalyst/analysis/Analyzer.scala    |  4 ++--
 .../spark/sql/catalyst/parser/AstBuilder.scala    |  5 ++---
 .../plans/logical/basicLogicalOperators.scala     |  6 ++++++
 .../sql/catalyst/parser/PlanParserSuite.scala     | 11 +++++++++++
 .../test/resources/sql-tests/inputs/group-by.sql  |  2 +-
 .../src/test/resources/sql-tests/inputs/limit.sql |  2 +-
 .../sql-tests/inputs/string-functions.sql         |  2 +-
 .../subquery/in-subquery/in-set-operations.sql    |  2 +-
 .../negative-cases/invalid-correlation.sql        |  2 +-
 .../scalar-subquery/scalar-subquery-predicate.sql |  2 +-
 .../src/test/resources/sql-tests/inputs/union.sql |  4 ++--
 .../resources/sql-tests/results/group-by.sql.out  |  2 +-
 .../resources/sql-tests/results/limit.sql.out     |  2 +-
 .../sql-tests/results/string-functions.sql.out    | 12 +++++++-----
 .../in-subquery/in-set-operations.sql.out         |  2 +-
 .../negative-cases/invalid-correlation.sql.out    |  2 +-
 .../scalar-subquery-predicate.sql.out             |  2 +-
 .../resources/sql-tests/results/union.sql.out     |  4 ++--
 .../org/apache/spark/sql/CachedTableSuite.scala   | 18 +++++++++---------
 .../org/apache/spark/sql/SubquerySuite.scala      |  8 ++++----
 21 files changed, 57 insertions(+), 39 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 
b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index f99ce24..dc11e53 100644
--- 
a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ 
b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -473,7 +473,7 @@ identifierComment
 
 relationPrimary
     : tableIdentifier sample? (AS? strictIdentifier)?      #tableName
-    | '(' queryNoWith ')' sample? (AS? strictIdentifier)?  #aliasedQuery
+    | '(' queryNoWith ')' sample? (AS? strictIdentifier)   #aliasedQuery
     | '(' relation ')' sample? (AS? strictIdentifier)?     #aliasedRelation
     | inlineTable                                          #inlineTableDefault2
     | functionTable                                        #tableValuedFunction

http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 7538a64..d58b8ac 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -1045,7 +1045,7 @@ class Analyzer(
       // Skip sort with aggregate. This will be handled in 
ResolveAggregateFunctions
       case sa @ Sort(_, _, child: Aggregate) => sa
 
-      case s @ Sort(order, _, child) if child.resolved =>
+      case s @ Sort(order, _, child) if !s.resolved && child.resolved =>
         try {
           val newOrder = order.map(resolveExpressionRecursively(_, 
child).asInstanceOf[SortOrder])
           val requiredAttrs = AttributeSet(newOrder).filter(_.resolved)
@@ -1066,7 +1066,7 @@ class Analyzer(
           case ae: AnalysisException => s
         }
 
-      case f @ Filter(cond, child) if child.resolved =>
+      case f @ Filter(cond, child) if !f.resolved && child.resolved =>
         try {
           val newCond = resolveExpressionRecursively(cond, child)
           val requiredAttrs = newCond.references.filter(_.resolved)

http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 740422b..f033fd4 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -745,9 +745,8 @@ class AstBuilder(conf: SQLConf) extends 
SqlBaseBaseVisitor[AnyRef] with Logging
    * hooks.
    */
   override def visitAliasedQuery(ctx: AliasedQueryContext): LogicalPlan = 
withOrigin(ctx) {
-    plan(ctx.queryNoWith)
-      .optionalMap(ctx.sample)(withSample)
-      .optionalMap(ctx.strictIdentifier)(aliasPlan)
+    aliasPlan(ctx.strictIdentifier,
+      plan(ctx.queryNoWith).optionalMap(ctx.sample)(withSample))
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
index 2c19265..7a54995 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
@@ -798,6 +798,12 @@ case class LocalLimit(limitExpr: Expression, child: 
LogicalPlan) extends UnaryNo
   }
 }
 
+/**
+ * Aliased subquery.
+ *
+ * @param alias the alias name for this subquery.
+ * @param child the logical plan of this subquery.
+ */
 case class SubqueryAlias(
     alias: String,
     child: LogicalPlan)

http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
index cf137cf..cca0291 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
@@ -444,6 +444,17 @@ class PlanParserSuite extends PlanTest {
         |      (select id from t0)) as u_1
       """.stripMargin,
       plan.union(plan).union(plan).as("u_1").select('id))
+
+  }
+
+  test("aliased subquery") {
+    assertEqual("select a from (select id as a from t0) tt",
+      table("t0").select('id.as("a")).as("tt").select('a))
+    intercept("select a from (select id as a from t0)", "mismatched input")
+
+    assertEqual("from (select id as a from t0) tt select a",
+      table("t0").select('id.as("a")).as("tt").select('a))
+    intercept("from (select id as a from t0) select a", "extraneous input 'a'")
   }
 
   test("scalar sub-query") {

http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/core/src/test/resources/sql-tests/inputs/group-by.sql
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/inputs/group-by.sql 
b/sql/core/src/test/resources/sql-tests/inputs/group-by.sql
index 1e13845..bc21207 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/group-by.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/group-by.sql
@@ -34,7 +34,7 @@ SELECT SKEWNESS(a), KURTOSIS(a), MIN(a), MAX(a), AVG(a), 
VARIANCE(a), STDDEV(a),
 FROM testData;
 
 -- Aggregate with foldable input and multiple distinct groups.
-SELECT COUNT(DISTINCT b), COUNT(DISTINCT b, c) FROM (SELECT 1 AS a, 2 AS b, 3 
AS c) GROUP BY a;
+SELECT COUNT(DISTINCT b), COUNT(DISTINCT b, c) FROM (SELECT 1 AS a, 2 AS b, 3 
AS c) t GROUP BY a;
 
 -- Aliases in SELECT could be used in GROUP BY
 SELECT a AS k, COUNT(b) FROM testData GROUP BY k;

http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/core/src/test/resources/sql-tests/inputs/limit.sql
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/inputs/limit.sql 
b/sql/core/src/test/resources/sql-tests/inputs/limit.sql
index f21912a..df555bd 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/limit.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/limit.sql
@@ -21,7 +21,7 @@ SELECT * FROM testdata LIMIT true;
 SELECT * FROM testdata LIMIT 'a';
 
 -- limit within a subquery
-SELECT * FROM (SELECT * FROM range(10) LIMIT 5) WHERE id > 3;
+SELECT * FROM (SELECT * FROM range(10) LIMIT 5) t WHERE id > 3;
 
 -- limit ALL
 SELECT * FROM testdata WHERE key < 3 LIMIT ALL;

http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/core/src/test/resources/sql-tests/inputs/string-functions.sql
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/inputs/string-functions.sql 
b/sql/core/src/test/resources/sql-tests/inputs/string-functions.sql
index f685779..da4b39c 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/string-functions.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/string-functions.sql
@@ -7,4 +7,4 @@ select 'a' || 'b' || 'c';
 
 -- Check if catalyst combine nested `Concat`s
 EXPLAIN EXTENDED SELECT (col1 || col2 || col3 || col4) col
-FROM (SELECT id col1, id col2, id col3, id col4 FROM range(10));
+FROM (SELECT id col1, id col2, id col3, id col4 FROM range(10)) t;

http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/core/src/test/resources/sql-tests/inputs/subquery/in-subquery/in-set-operations.sql
----------------------------------------------------------------------
diff --git 
a/sql/core/src/test/resources/sql-tests/inputs/subquery/in-subquery/in-set-operations.sql
 
b/sql/core/src/test/resources/sql-tests/inputs/subquery/in-subquery/in-set-operations.sql
index 5c371d2..42f84e9 100644
--- 
a/sql/core/src/test/resources/sql-tests/inputs/subquery/in-subquery/in-set-operations.sql
+++ 
b/sql/core/src/test/resources/sql-tests/inputs/subquery/in-subquery/in-set-operations.sql
@@ -394,7 +394,7 @@ FROM   (SELECT *
                              FROM   t1)) t4
         WHERE  t4.t2b IN (SELECT Min(t3b)
                           FROM   t3
-                          WHERE  t4.t2a = t3a));
+                          WHERE  t4.t2a = t3a)) T;
 
 -- UNION, UNION ALL, UNION DISTINCT, INTERSECT and EXCEPT for NOT IN
 -- TC 01.12

http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/core/src/test/resources/sql-tests/inputs/subquery/negative-cases/invalid-correlation.sql
----------------------------------------------------------------------
diff --git 
a/sql/core/src/test/resources/sql-tests/inputs/subquery/negative-cases/invalid-correlation.sql
 
b/sql/core/src/test/resources/sql-tests/inputs/subquery/negative-cases/invalid-correlation.sql
index e22cade..f3f0c76 100644
--- 
a/sql/core/src/test/resources/sql-tests/inputs/subquery/negative-cases/invalid-correlation.sql
+++ 
b/sql/core/src/test/resources/sql-tests/inputs/subquery/negative-cases/invalid-correlation.sql
@@ -23,7 +23,7 @@ AND    t2b = (SELECT max(avg)
               FROM   (SELECT   t2b, avg(t2b) avg
                       FROM     t2
                       WHERE    t2a = t1.t1b
-                     )
+                     ) T
              )
 ;
 

http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/core/src/test/resources/sql-tests/inputs/subquery/scalar-subquery/scalar-subquery-predicate.sql
----------------------------------------------------------------------
diff --git 
a/sql/core/src/test/resources/sql-tests/inputs/subquery/scalar-subquery/scalar-subquery-predicate.sql
 
b/sql/core/src/test/resources/sql-tests/inputs/subquery/scalar-subquery/scalar-subquery-predicate.sql
index fb0d07f..dbe8d76 100644
--- 
a/sql/core/src/test/resources/sql-tests/inputs/subquery/scalar-subquery/scalar-subquery-predicate.sql
+++ 
b/sql/core/src/test/resources/sql-tests/inputs/subquery/scalar-subquery/scalar-subquery-predicate.sql
@@ -19,7 +19,7 @@ AND    c.cv = (SELECT max(avg)
                FROM   (SELECT   c1.cv, avg(c1.cv) avg
                        FROM     c c1
                        WHERE    c1.ck = p.pk
-                       GROUP BY c1.cv));
+                       GROUP BY c1.cv) T);
 
 create temporary view t1 as select * from values
   ('val1a', 6S, 8, 10L, float(15.0), 20D, 20E2, timestamp '2014-04-04 
00:00:00.000', date '2014-04-04'),

http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/core/src/test/resources/sql-tests/inputs/union.sql
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/inputs/union.sql 
b/sql/core/src/test/resources/sql-tests/inputs/union.sql
index e57d69e..63bc044 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/union.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/union.sql
@@ -5,7 +5,7 @@ CREATE OR REPLACE TEMPORARY VIEW t2 AS VALUES (1.0, 1), (2.0, 
4) tbl(c1, c2);
 SELECT *
 FROM   (SELECT * FROM t1
         UNION ALL
-        SELECT * FROM t1);
+        SELECT * FROM t1) T;
 
 -- Type Coerced Union
 SELECT *
@@ -13,7 +13,7 @@ FROM   (SELECT * FROM t1
         UNION ALL
         SELECT * FROM t2
         UNION ALL
-        SELECT * FROM t2);
+        SELECT * FROM t2) T;
 
 -- Regression test for SPARK-18622
 SELECT a

http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/core/src/test/resources/sql-tests/results/group-by.sql.out
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/results/group-by.sql.out 
b/sql/core/src/test/resources/sql-tests/results/group-by.sql.out
index 42e8230..1467985 100644
--- a/sql/core/src/test/resources/sql-tests/results/group-by.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/group-by.sql.out
@@ -134,7 +134,7 @@ struct<skewness(CAST(a AS DOUBLE)):double,kurtosis(CAST(a 
AS DOUBLE)):double,min
 
 
 -- !query 14
-SELECT COUNT(DISTINCT b), COUNT(DISTINCT b, c) FROM (SELECT 1 AS a, 2 AS b, 3 
AS c) GROUP BY a
+SELECT COUNT(DISTINCT b), COUNT(DISTINCT b, c) FROM (SELECT 1 AS a, 2 AS b, 3 
AS c) t GROUP BY a
 -- !query 14 schema
 struct<count(DISTINCT b):bigint,count(DISTINCT b, c):bigint>
 -- !query 14 output

http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/core/src/test/resources/sql-tests/results/limit.sql.out
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/results/limit.sql.out 
b/sql/core/src/test/resources/sql-tests/results/limit.sql.out
index 146abe6..afdd6df 100644
--- a/sql/core/src/test/resources/sql-tests/results/limit.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/limit.sql.out
@@ -93,7 +93,7 @@ The limit expression must be integer type, but got string;
 
 
 -- !query 10
-SELECT * FROM (SELECT * FROM range(10) LIMIT 5) WHERE id > 3
+SELECT * FROM (SELECT * FROM range(10) LIMIT 5) t WHERE id > 3
 -- !query 10 schema
 struct<id:bigint>
 -- !query 10 output

http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
----------------------------------------------------------------------
diff --git 
a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out 
b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
index d48d1a8..37727ca 100644
--- a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
@@ -30,20 +30,22 @@ abc
 
 -- !query 3
 EXPLAIN EXTENDED SELECT (col1 || col2 || col3 || col4) col
-FROM (SELECT id col1, id col2, id col3, id col4 FROM range(10))
+FROM (SELECT id col1, id col2, id col3, id col4 FROM range(10)) t
 -- !query 3 schema
 struct<plan:string>
 -- !query 3 output
 == Parsed Logical Plan ==
 'Project [concat(concat(concat('col1, 'col2), 'col3), 'col4) AS col#x]
-+- 'Project ['id AS col1#x, 'id AS col2#x, 'id AS col3#x, 'id AS col4#x]
-   +- 'UnresolvedTableValuedFunction range, [10]
++- 'SubqueryAlias t
+   +- 'Project ['id AS col1#x, 'id AS col2#x, 'id AS col3#x, 'id AS col4#x]
+      +- 'UnresolvedTableValuedFunction range, [10]
 
 == Analyzed Logical Plan ==
 col: string
 Project [concat(concat(concat(cast(col1#xL as string), cast(col2#xL as 
string)), cast(col3#xL as string)), cast(col4#xL as string)) AS col#x]
-+- Project [id#xL AS col1#xL, id#xL AS col2#xL, id#xL AS col3#xL, id#xL AS 
col4#xL]
-   +- Range (0, 10, step=1, splits=None)
++- SubqueryAlias t
+   +- Project [id#xL AS col1#xL, id#xL AS col2#xL, id#xL AS col3#xL, id#xL AS 
col4#xL]
+      +- Range (0, 10, step=1, splits=None)
 
 == Optimized Logical Plan ==
 Project [concat(cast(id#xL as string), cast(id#xL as string), cast(id#xL as 
string), cast(id#xL as string)) AS col#x]

http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-set-operations.sql.out
----------------------------------------------------------------------
diff --git 
a/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-set-operations.sql.out
 
b/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-set-operations.sql.out
index e06f920..5780f49 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-set-operations.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-set-operations.sql.out
@@ -496,7 +496,7 @@ FROM   (SELECT *
                              FROM   t1)) t4
         WHERE  t4.t2b IN (SELECT Min(t3b)
                           FROM   t3
-                          WHERE  t4.t2a = t3a))
+                          WHERE  t4.t2a = t3a)) T
 -- !query 13 schema
 
struct<t2a:string,t2b:smallint,t2c:int,t2d:bigint,t2e:float,t2f:double,t2g:decimal(2,-2),t2h:timestamp,t2i:date>
 -- !query 13 output

http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out
----------------------------------------------------------------------
diff --git 
a/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out
 
b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out
index e4b1a2d..ca3930b 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out
@@ -40,7 +40,7 @@ AND    t2b = (SELECT max(avg)
               FROM   (SELECT   t2b, avg(t2b) avg
                       FROM     t2
                       WHERE    t2a = t1.t1b
-                     )
+                     ) T
              )
 -- !query 3 schema
 struct<>

http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/core/src/test/resources/sql-tests/results/subquery/scalar-subquery/scalar-subquery-predicate.sql.out
----------------------------------------------------------------------
diff --git 
a/sql/core/src/test/resources/sql-tests/results/subquery/scalar-subquery/scalar-subquery-predicate.sql.out
 
b/sql/core/src/test/resources/sql-tests/results/subquery/scalar-subquery/scalar-subquery-predicate.sql.out
index 8b29300..1d5dddc 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/subquery/scalar-subquery/scalar-subquery-predicate.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/subquery/scalar-subquery/scalar-subquery-predicate.sql.out
@@ -39,7 +39,7 @@ AND    c.cv = (SELECT max(avg)
                FROM   (SELECT   c1.cv, avg(c1.cv) avg
                        FROM     c c1
                        WHERE    c1.ck = p.pk
-                       GROUP BY c1.cv))
+                       GROUP BY c1.cv) T)
 -- !query 3 schema
 struct<pk:int,cv:int>
 -- !query 3 output

http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/core/src/test/resources/sql-tests/results/union.sql.out
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/results/union.sql.out 
b/sql/core/src/test/resources/sql-tests/results/union.sql.out
index d123b7f..865b3ae 100644
--- a/sql/core/src/test/resources/sql-tests/results/union.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/union.sql.out
@@ -22,7 +22,7 @@ struct<>
 SELECT *
 FROM   (SELECT * FROM t1
         UNION ALL
-        SELECT * FROM t1)
+        SELECT * FROM t1) T
 -- !query 2 schema
 struct<c1:int,c2:string>
 -- !query 2 output
@@ -38,7 +38,7 @@ FROM   (SELECT * FROM t1
         UNION ALL
         SELECT * FROM t2
         UNION ALL
-        SELECT * FROM t2)
+        SELECT * FROM t2) T
 -- !query 3 schema
 struct<c1:decimal(11,1),c2:string>
 -- !query 3 output

http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
index e66fe97..4114f7a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
@@ -631,13 +631,13 @@ class CachedTableSuite extends QueryTest with 
SQLTestUtils with SharedSQLContext
       val ds2 =
         sql(
           """
-            |SELECT * FROM (SELECT max(c1) FROM t1 GROUP BY c1)
+            |SELECT * FROM (SELECT max(c1) as c1 FROM t1 GROUP BY c1) tt
             |WHERE
-            |c1 = (SELECT max(c1) FROM t2 GROUP BY c1)
+            |tt.c1 = (SELECT max(c1) FROM t2 GROUP BY c1)
             |OR
             |EXISTS (SELECT c1 FROM t3)
             |OR
-            |c1 IN (SELECT c1 FROM t4)
+            |tt.c1 IN (SELECT c1 FROM t4)
           """.stripMargin)
       assert(getNumInMemoryRelations(ds2) == 4)
     }
@@ -788,25 +788,25 @@ class CachedTableSuite extends QueryTest with 
SQLTestUtils with SharedSQLContext
       // Scalar subquery and predicate subquery
       sql(
         """
-          |SELECT * FROM (SELECT max(c1) FROM t1 GROUP BY c1)
+          |SELECT * FROM (SELECT max(c1) as c1 FROM t1 GROUP BY c1) tt
           |WHERE
-          |c1 = (SELECT max(c1) FROM t2 GROUP BY c1)
+          |tt.c1 = (SELECT max(c1) FROM t2 GROUP BY c1)
           |OR
           |EXISTS (SELECT c1 FROM t3)
           |OR
-          |c1 IN (SELECT c1 FROM t4)
+          |tt.c1 IN (SELECT c1 FROM t4)
         """.stripMargin).cache()
 
       val cachedDs2 =
         sql(
           """
-            |SELECT * FROM (SELECT max(c1) FROM t1 GROUP BY c1)
+            |SELECT * FROM (SELECT max(c1) as c1 FROM t1 GROUP BY c1) tt
             |WHERE
-            |c1 = (SELECT max(c1) FROM t2 GROUP BY c1)
+            |tt.c1 = (SELECT max(c1) FROM t2 GROUP BY c1)
             |OR
             |EXISTS (SELECT c1 FROM t3)
             |OR
-            |c1 IN (SELECT c1 FROM t4)
+            |tt.c1 IN (SELECT c1 FROM t4)
           """.stripMargin)
       assert(getNumInMemoryRelations(cachedDs2) == 1)
     }

http://git-wip-us.apache.org/repos/asf/spark/blob/7463a88b/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala
index a01eb2a..4629a8c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala
@@ -112,7 +112,7 @@ class SubquerySuite extends QueryTest with SharedSQLContext 
{
           |   with t4 as (select 1 as d, 3 as e)
           |   select * from t4 cross join t2 where t2.b = t4.d
           | )
-          | select a from (select 1 as a union all select 2 as a)
+          | select a from (select 1 as a union all select 2 as a) t
           | where a = (select max(d) from t3)
         """.stripMargin),
       Array(Row(1))
@@ -606,8 +606,8 @@ class SubquerySuite extends QueryTest with SharedSQLContext 
{
             |    select cntPlusOne + 1 as cntPlusTwo from (
             |        select cnt + 1 as cntPlusOne from (
             |            select sum(r.c) s, count(*) cnt from r where l.a = 
r.c having cnt = 0
-            |        )
-            |    )
+            |        ) t1
+            |    ) t2
             |) = 2""".stripMargin),
       Row(1) :: Row(1) :: Row(null) :: Row(null) :: Nil)
   }
@@ -655,7 +655,7 @@ class SubquerySuite extends QueryTest with SharedSQLContext 
{
           """
             | select c1 from onerow t1
             | where exists (select 1
-            |               from   (select 1 from onerow t2 LIMIT 1)
+            |               from   (select 1 as c1 from onerow t2 LIMIT 1) t2
             |               where  t1.c1=t2.c1)""".stripMargin),
         Row(1) :: Nil)
     }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to