Repository: spark
Updated Branches:
  refs/heads/master a6cfa3f38 -> 557d6e322


[SPARK-17713][SQL] Move row-datasource related tests out of JDBCSuite

## What changes were proposed in this pull request?

As a follow-up to https://github.com/apache/spark/pull/15273, we should move
non-JDBC-specific tests out of that suite.

## How was this patch tested?

Ran the test.

Author: Eric Liang <e...@databricks.com>

Closes #15287 from ericl/spark-17713.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/557d6e32
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/557d6e32
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/557d6e32

Branch: refs/heads/master
Commit: 557d6e32272dee4eaa0f426cc3e2f82ea361c3da
Parents: a6cfa3f
Author: Eric Liang <e...@databricks.com>
Authored: Wed Sep 28 16:20:49 2016 -0700
Committer: Reynold Xin <r...@databricks.com>
Committed: Wed Sep 28 16:20:49 2016 -0700

----------------------------------------------------------------------
 .../RowDataSourceStrategySuite.scala            | 72 ++++++++++++++++++++
 .../org/apache/spark/sql/jdbc/JDBCSuite.scala   |  8 ---
 2 files changed, 72 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/557d6e32/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/RowDataSourceStrategySuite.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/RowDataSourceStrategySuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/RowDataSourceStrategySuite.scala
new file mode 100644
index 0000000..d9afa46
--- /dev/null
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/RowDataSourceStrategySuite.scala
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.datasources
+
+import java.sql.DriverManager
+import java.util.Properties
+
+import org.scalatest.BeforeAndAfter
+
+import org.apache.spark.SparkFunSuite
+import org.apache.spark.sql.{DataFrame, Row}
+import org.apache.spark.sql.sources._
+import org.apache.spark.sql.test.SharedSQLContext
+import org.apache.spark.sql.types._
+import org.apache.spark.util.Utils
+
+class RowDataSourceStrategySuite extends SparkFunSuite with BeforeAndAfter 
with SharedSQLContext {
+  import testImplicits._
+
+  val url = "jdbc:h2:mem:testdb0"
+  val urlWithUserAndPass = 
"jdbc:h2:mem:testdb0;user=testUser;password=testPass"
+  var conn: java.sql.Connection = null
+
+  before {
+    Utils.classForName("org.h2.Driver")
+    // Extra properties that will be specified for our database. We need these 
to test
+    // usage of parameters from OPTIONS clause in queries.
+    val properties = new Properties()
+    properties.setProperty("user", "testUser")
+    properties.setProperty("password", "testPass")
+    properties.setProperty("rowId", "false")
+
+    conn = DriverManager.getConnection(url, properties)
+    conn.prepareStatement("create schema test").executeUpdate()
+    conn.prepareStatement("create table test.inttypes (a INT, b INT, c 
INT)").executeUpdate()
+    conn.prepareStatement("insert into test.inttypes values (1, 2, 
3)").executeUpdate()
+    conn.commit()
+    sql(
+      s"""
+        |CREATE TEMPORARY TABLE inttypes
+        |USING org.apache.spark.sql.jdbc
+        |OPTIONS (url '$url', dbtable 'TEST.INTTYPES', user 'testUser', 
password 'testPass')
+      """.stripMargin.replaceAll("\n", " "))
+  }
+
+  after {
+    conn.close()
+  }
+
+  test("SPARK-17673: Exchange reuse respects differences in output schema") {
+    val df = sql("SELECT * FROM inttypes")
+    val df1 = df.groupBy("a").agg("b" -> "min")
+    val df2 = df.groupBy("a").agg("c" -> "min")
+    val res = df1.union(df2)
+    assert(res.distinct().count() == 2)  // would be 1 if the exchange was 
incorrectly reused
+  }
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/557d6e32/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index c94cb3b..10f15ca 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -791,12 +791,4 @@ class JDBCSuite extends SparkFunSuite
     val schema = JdbcUtils.schemaString(df, "jdbc:mysql://localhost:3306/temp")
     assert(schema.contains("`order` TEXT"))
   }
-
-  test("SPARK-17673: Exchange reuse respects differences in output schema") {
-    val df = sql("SELECT * FROM inttypes WHERE a IS NOT NULL")
-    val df1 = df.groupBy("a").agg("c" -> "min")
-    val df2 = df.groupBy("a").agg("d" -> "min")
-    val res = df1.union(df2)
-    assert(res.distinct().count() == 2)  // would be 1 if the exchange was 
incorrectly reused
-  }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to