Repository: spark
Updated Branches:
  refs/heads/master 668cb1def -> c7a183b2c


[SPARK-2041][SQL] Correctly analyze queries where columnName == tableName.

Author: Michael Armbrust <[email protected]>

Closes #985 from marmbrus/tableName and squashes the following commits:

3caaa27 [Michael Armbrust] Correctly analyze queries where columnName == tableName.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/c7a183b2
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/c7a183b2
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/c7a183b2

Branch: refs/heads/master
Commit: c7a183b2c2bca13565496495b4ae3a3a9f63f9ab
Parents: 668cb1d
Author: Michael Armbrust <[email protected]>
Authored: Thu Jun 5 17:42:08 2014 -0700
Committer: Reynold Xin <[email protected]>
Committed: Thu Jun 5 17:42:08 2014 -0700

----------------------------------------------------------------------
 .../apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala  | 3 ++-
 .../src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala    | 6 ++++++
 sql/core/src/test/scala/org/apache/spark/sql/TestData.scala    | 3 +++
 3 files changed, 11 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/c7a183b2/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
index 5eb52d5..2b8fbdc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
@@ -64,7 +64,8 @@ abstract class LogicalPlan extends QueryPlan[LogicalPlan] {
     // struct fields.
     val options = children.flatMap(_.output).flatMap { option =>
      // If the first part of the desired name matches a qualifier for this possible match, drop it.
-      val remainingParts = if (option.qualifiers contains parts.head) parts.drop(1) else parts
+      val remainingParts =
+        if (option.qualifiers.contains(parts.head) && parts.size > 1) parts.drop(1) else parts
      if (option.name == remainingParts.head) (option, remainingParts.tail.toList) :: Nil else Nil
     }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/c7a183b2/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index e2ad391..aa0c426 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -28,6 +28,12 @@ class SQLQuerySuite extends QueryTest {
   // Make sure the tables are loaded.
   TestData
 
+  test("SPARK-2041 column name equals tablename") {
+    checkAnswer(
+      sql("SELECT tableName FROM tableName"),
+      "test")
+  }
+
   test("index into array") {
     checkAnswer(
      sql("SELECT data, data[0], data[0] + data[1], data[0 + 1] FROM arrayData"),

http://git-wip-us.apache.org/repos/asf/spark/blob/c7a183b2/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
index 876bd16..05de736 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
@@ -114,4 +114,7 @@ object TestData {
       NullStrings(2, "ABC") ::
       NullStrings(3, null) :: Nil)
   nullStrings.registerAsTable("nullStrings")
+
+  case class TableName(tableName: String)
+  TestSQLContext.sparkContext.parallelize(TableName("test") :: Nil).registerAsTable("tableName")
 }

Reply via email to