Repository: spark
Updated Branches:
  refs/heads/master 418f38d92 -> 76b472f12


[SPARK-6136] [SQL] Removed JDBC integration tests that depend on docker-client

The integration test suites in the JDBC data source (`MySQLIntegration` and 
`PostgresIntegration`) depend on docker-client 2.7.5, which transitively 
depends on Guava 17.0. Unfortunately, Guava 17.0 causes binary compatibility 
issues at test runtime when Spark is compiled against Hive 0.12.0 or Hadoop 2.4.

Since `MySQLIntegration` and `PostgresIntegration` are currently ignored, I'd 
suggest moving them from the Spark project to the [Spark integration tests][1] 
project. This PR removes both the JDBC data source integration tests and the 
docker-client test dependency.

[1]: https://github.com/databricks/spark-integration-tests
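
As an aside, a conflict like this can sometimes be worked around by excluding 
the offending transitive artifact rather than removing the dependent code. A 
minimal sbt sketch of that alternative (not the approach taken here, and 
whether docker-client still works against an older Guava is untested):

    // Hypothetical: keep docker-client but exclude its Guava 17.0, letting
    // the test classpath fall back to the Guava version Spark already ships.
    libraryDependencies += ("com.spotify" % "docker-client" % "2.7.5" % "test")
      .exclude("com.google.guava", "guava")

Given that both suites are ignored anyway, removing the dependency outright is 
the simpler fix.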


Author: Cheng Lian <l...@databricks.com>

Closes #4872 from liancheng/remove-docker-client and squashes the following 
commits:

1f4169e [Cheng Lian] Removes DockerHacks
159b24a [Cheng Lian] Removed JDBC integration tests that depend on docker-client


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/76b472f1
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/76b472f1
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/76b472f1

Branch: refs/heads/master
Commit: 76b472f12a57bb5bec7b3791660eb47e9177da7f
Parents: 418f38d
Author: Cheng Lian <l...@databricks.com>
Authored: Wed Mar 4 19:39:02 2015 +0800
Committer: Cheng Lian <l...@databricks.com>
Committed: Wed Mar 4 19:39:02 2015 +0800

----------------------------------------------------------------------
 sql/core/pom.xml                                |   6 -
 .../org/apache/spark/sql/jdbc/DockerHacks.scala |  51 -----
 .../spark/sql/jdbc/MySQLIntegration.scala       | 228 -------------------
 .../spark/sql/jdbc/PostgresIntegration.scala    | 147 ------------
 4 files changed, 432 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/76b472f1/sql/core/pom.xml
----------------------------------------------------------------------
diff --git a/sql/core/pom.xml b/sql/core/pom.xml
index e28baa5..d4c8c68 100644
--- a/sql/core/pom.xml
+++ b/sql/core/pom.xml
@@ -99,12 +99,6 @@
       <version>9.3-1102-jdbc41</version>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>com.spotify</groupId>
-      <artifactId>docker-client</artifactId>
-      <version>2.7.5</version>
-      <scope>test</scope>
-    </dependency>
   </dependencies>
   <build>
     <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>

http://git-wip-us.apache.org/repos/asf/spark/blob/76b472f1/sql/core/src/test/scala/org/apache/spark/sql/jdbc/DockerHacks.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/DockerHacks.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/DockerHacks.scala
deleted file mode 100644
index f332cb3..0000000
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/DockerHacks.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql.jdbc
-
-import scala.collection.mutable.MutableList
-
-import com.spotify.docker.client._
-
-/**
- * A factory and morgue for DockerClient objects.  In the DockerClient we use,
- * calling close() closes the desired DockerClient but also renders all other
- * DockerClients inoperable.  This is inconvenient if we have more than one
- * open, such as during tests.
- */
-object DockerClientFactory {
-  var numClients: Int = 0
-  val zombies = new MutableList[DockerClient]()
-
-  def get(): DockerClient = {
-    this.synchronized {
-      numClients = numClients + 1
-      DefaultDockerClient.fromEnv.build()
-    }
-  }
-
-  def close(dc: DockerClient) {
-    this.synchronized {
-      numClients = numClients - 1
-      zombies += dc
-      if (numClients == 0) {
-        zombies.foreach(_.close())
-        zombies.clear()
-      }
-    }
-  }
-}
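
The class comment above documents the factory-and-morgue workaround: in this 
docker-client version, closing any one DockerClient renders the others 
inoperable, so "closed" clients are parked in `zombies` until the last user 
returns its client. A minimal sketch of the intended call pattern 
(illustrative only):

    // Each get() bumps numClients; close() parks the client rather than
    // closing it, and only when the count reaches zero are all parked
    // clients actually closed together.
    val a = DockerClientFactory.get()
    val b = DockerClientFactory.get()
    try {
      // ... drive two containers concurrently ...
    } finally {
      DockerClientFactory.close(a)  // parked, not yet closed
      DockerClientFactory.close(b)  // count hits zero: both close for real
    }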

http://git-wip-us.apache.org/repos/asf/spark/blob/76b472f1/sql/core/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegration.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegration.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegration.scala
deleted file mode 100644
index 5b8a76f..0000000
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegration.scala
+++ /dev/null
@@ -1,228 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql.jdbc
-
-import java.math.BigDecimal
-import java.sql.{Date, Timestamp}
-
-import com.spotify.docker.client.DockerClient
-import com.spotify.docker.client.messages.ContainerConfig
-import org.scalatest.{BeforeAndAfterAll, FunSuite, Ignore}
-
-import org.apache.spark.sql.test._
-
-class MySQLDatabase {
-  val docker: DockerClient = DockerClientFactory.get()
-  val containerId = {
-    println("Pulling mysql")
-    docker.pull("mysql")
-    println("Configuring container")
-    val config = ContainerConfig.builder().image("mysql")
-      .env("MYSQL_ROOT_PASSWORD=rootpass")
-      .build()
-    println("Creating container")
-    val id = docker.createContainer(config).id
-    println("Starting container " + id)
-    docker.startContainer(id)
-    id
-  }
-  val ip = docker.inspectContainer(containerId).networkSettings.ipAddress
-
-  def close() {
-    try {
-      println("Killing container " + containerId)
-      docker.killContainer(containerId)
-      println("Removing container " + containerId)
-      docker.removeContainer(containerId)
-      println("Closing docker client")
-      DockerClientFactory.close(docker)
-    } catch {
-      case e: Exception =>
-        println(e)
-        println("You may need to clean this up manually.")
-        throw e
-    }
-  }
-}
-
-@Ignore class MySQLIntegration extends FunSuite with BeforeAndAfterAll {
-  var ip: String = null
-
-  def url(ip: String): String = url(ip, "mysql")
-  def url(ip: String, db: String): String = s"jdbc:mysql://$ip:3306/$db?user=root&password=rootpass"
-
-  def waitForDatabase(ip: String, maxMillis: Long) {
-    println("Waiting for database to start up.")
-    val before = System.currentTimeMillis()
-    var lastException: java.sql.SQLException = null
-    while (true) {
-      if (System.currentTimeMillis() > before + maxMillis) {
-        throw new java.sql.SQLException(s"Database not up after $maxMillis ms.", lastException)
-      }
-      try {
-        val conn = java.sql.DriverManager.getConnection(url(ip))
-        conn.close()
-        println("Database is up.")
-        return;
-      } catch {
-        case e: java.sql.SQLException =>
-          lastException = e
-          java.lang.Thread.sleep(250)
-      }
-    }
-  }
-
-  def setupDatabase(ip: String) {
-    val conn = java.sql.DriverManager.getConnection(url(ip))
-    try {
-      conn.prepareStatement("CREATE DATABASE foo").executeUpdate()
-      conn.prepareStatement("CREATE TABLE foo.tbl (x INTEGER, y TEXT(8))").executeUpdate()
-      conn.prepareStatement("INSERT INTO foo.tbl VALUES (42,'fred')").executeUpdate()
-      conn.prepareStatement("INSERT INTO foo.tbl VALUES (17,'dave')").executeUpdate()
-
-      conn.prepareStatement("CREATE TABLE foo.numbers (onebit BIT(1), tenbits BIT(10), "
-          + "small SMALLINT, med MEDIUMINT, nor INT, big BIGINT, deci DECIMAL(40,20), flt FLOAT, "
-          + "dbl DOUBLE)").executeUpdate()
-      conn.prepareStatement("INSERT INTO foo.numbers VALUES (b'0', b'1000100101', "
-          + "17, 77777, 123456789, 123456789012345, 123456789012345.123456789012345, "
-          + "42.75, 1.0000000000000002)").executeUpdate()
-
-      conn.prepareStatement("CREATE TABLE foo.dates (d DATE, t TIME, dt DATETIME, ts TIMESTAMP, "
-          + "yr YEAR)").executeUpdate()
-      conn.prepareStatement("INSERT INTO foo.dates VALUES ('1991-11-09', '13:31:24', "
-          + "'1996-01-01 01:23:45', '2009-02-13 23:31:30', '2001')").executeUpdate()
-
-      // TODO: Test locale conversion for strings.
-      conn.prepareStatement("CREATE TABLE foo.strings (a CHAR(10), b VARCHAR(10), c TINYTEXT, "
-          + "d TEXT, e MEDIUMTEXT, f LONGTEXT, g BINARY(4), h VARBINARY(10), i BLOB)"
-          ).executeUpdate()
-      conn.prepareStatement("INSERT INTO foo.strings VALUES ('the', 'quick', 'brown', 'fox', 'jumps', 'over', 'the', 'lazy', 'dog')").executeUpdate()
-    } finally {
-      conn.close()
-    }
-  }
-
-  var db: MySQLDatabase = null
-
-  override def beforeAll() {
-    // If you load the MySQL driver here, DriverManager will deadlock.  The
-    // MySQL driver gets loaded when its jar gets loaded, unlike the Postgres
-    // and H2 drivers.
-    //Class.forName("com.mysql.jdbc.Driver")
-
-    db = new MySQLDatabase()
-    waitForDatabase(db.ip, 60000)
-    setupDatabase(db.ip)
-    ip = db.ip
-  }
-
-  override def afterAll() {
-    db.close()
-  }
-
-  test("Basic test") {
-    val df = TestSQLContext.jdbc(url(ip, "foo"), "tbl")
-    val rows = df.collect()
-    assert(rows.length == 2)
-    val types = rows(0).toSeq.map(x => x.getClass.toString)
-    assert(types.length == 2)
-    assert(types(0).equals("class java.lang.Integer"))
-    assert(types(1).equals("class java.lang.String"))
-  }
-
-  test("Numeric types") {
-    val df = TestSQLContext.jdbc(url(ip, "foo"), "numbers")
-    val rows = df.collect()
-    assert(rows.length == 1)
-    val types = rows(0).toSeq.map(x => x.getClass.toString)
-    assert(types.length == 9)
-    println(types(1))
-    assert(types(0).equals("class java.lang.Boolean"))
-    assert(types(1).equals("class java.lang.Long"))
-    assert(types(2).equals("class java.lang.Integer"))
-    assert(types(3).equals("class java.lang.Integer"))
-    assert(types(4).equals("class java.lang.Integer"))
-    assert(types(5).equals("class java.lang.Long"))
-    assert(types(6).equals("class java.math.BigDecimal"))
-    assert(types(7).equals("class java.lang.Double"))
-    assert(types(8).equals("class java.lang.Double"))
-    assert(rows(0).getBoolean(0) == false)
-    assert(rows(0).getLong(1) == 0x225)
-    assert(rows(0).getInt(2) == 17)
-    assert(rows(0).getInt(3) == 77777)
-    assert(rows(0).getInt(4) == 123456789)
-    assert(rows(0).getLong(5) == 123456789012345L)
-    val bd = new BigDecimal("123456789012345.12345678901234500000")
-    assert(rows(0).getAs[BigDecimal](6).equals(bd))
-    assert(rows(0).getDouble(7) == 42.75)
-    assert(rows(0).getDouble(8) == 1.0000000000000002)
-  }
-
-  test("Date types") {
-    val df = TestSQLContext.jdbc(url(ip, "foo"), "dates")
-    val rows = df.collect()
-    assert(rows.length == 1)
-    val types = rows(0).toSeq.map(x => x.getClass.toString)
-    assert(types.length == 5)
-    assert(types(0).equals("class java.sql.Date"))
-    assert(types(1).equals("class java.sql.Timestamp"))
-    assert(types(2).equals("class java.sql.Timestamp"))
-    assert(types(3).equals("class java.sql.Timestamp"))
-    assert(types(4).equals("class java.sql.Date"))
-    assert(rows(0).getAs[Date](0).equals(new Date(91, 10, 9)))
-    assert(rows(0).getAs[Timestamp](1).equals(new Timestamp(70, 0, 1, 13, 31, 24, 0)))
-    assert(rows(0).getAs[Timestamp](2).equals(new Timestamp(96, 0, 1, 1, 23, 45, 0)))
-    assert(rows(0).getAs[Timestamp](3).equals(new Timestamp(109, 1, 13, 23, 31, 30, 0)))
-    assert(rows(0).getAs[Date](4).equals(new Date(101, 0, 1)))
-  }
-
-  test("String types") {
-    val df = TestSQLContext.jdbc(url(ip, "foo"), "strings")
-    val rows = df.collect()
-    assert(rows.length == 1)
-    val types = rows(0).toSeq.map(x => x.getClass.toString)
-    assert(types.length == 9)
-    assert(types(0).equals("class java.lang.String"))
-    assert(types(1).equals("class java.lang.String"))
-    assert(types(2).equals("class java.lang.String"))
-    assert(types(3).equals("class java.lang.String"))
-    assert(types(4).equals("class java.lang.String"))
-    assert(types(5).equals("class java.lang.String"))
-    assert(types(6).equals("class [B"))
-    assert(types(7).equals("class [B"))
-    assert(types(8).equals("class [B"))
-    assert(rows(0).getString(0).equals("the"))
-    assert(rows(0).getString(1).equals("quick"))
-    assert(rows(0).getString(2).equals("brown"))
-    assert(rows(0).getString(3).equals("fox"))
-    assert(rows(0).getString(4).equals("jumps"))
-    assert(rows(0).getString(5).equals("over"))
-    assert(java.util.Arrays.equals(rows(0).getAs[Array[Byte]](6), Array[Byte](116, 104, 101, 0)))
-    assert(java.util.Arrays.equals(rows(0).getAs[Array[Byte]](7), Array[Byte](108, 97, 122, 121)))
-    assert(java.util.Arrays.equals(rows(0).getAs[Array[Byte]](8), Array[Byte](100, 111, 103)))
-  }
-
-  test("Basic write test") {
-    val df1 = TestSQLContext.jdbc(url(ip, "foo"), "numbers")
-    val df2 = TestSQLContext.jdbc(url(ip, "foo"), "dates")
-    val df3 = TestSQLContext.jdbc(url(ip, "foo"), "strings")
-    df1.createJDBCTable(url(ip, "foo"), "numberscopy", false)
-    df2.createJDBCTable(url(ip, "foo"), "datescopy", false)
-    df3.createJDBCTable(url(ip, "foo"), "stringscopy", false)
-  }
-}
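
One detail worth preserving from the removed suite: the commented-out 
Class.forName in beforeAll was deliberate, since eagerly loading the MySQL 
driver there could deadlock DriverManager. A hedged sketch of the pattern the 
suite relied on instead (JDBC 4 drivers self-register when their jar is on 
the classpath; the host and credentials below are illustrative):

    import java.sql.DriverManager

    // No explicit Class.forName: DriverManager discovers a JDBC 4 driver
    // lazily on the first getConnection call with a matching URL.
    val conn = DriverManager.getConnection(
      "jdbc:mysql://127.0.0.1:3306/foo?user=root&password=rootpass")
    try {
      // ... issue statements ...
    } finally {
      conn.close()
    }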

http://git-wip-us.apache.org/repos/asf/spark/blob/76b472f1/sql/core/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegration.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegration.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegration.scala
deleted file mode 100644
index e17be99..0000000
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegration.scala
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql.jdbc
-
-import java.sql.DriverManager
-
-import com.spotify.docker.client.DockerClient
-import com.spotify.docker.client.messages.ContainerConfig
-import org.scalatest.{BeforeAndAfterAll, FunSuite, Ignore}
-
-import org.apache.spark.sql.test._
-
-class PostgresDatabase {
-  val docker: DockerClient = DockerClientFactory.get()
-  val containerId = {
-    println("Pulling postgres")
-    docker.pull("postgres")
-    println("Configuring container")
-    val config = ContainerConfig.builder().image("postgres")
-      .env("POSTGRES_PASSWORD=rootpass")
-      .build()
-    println("Creating container")
-    val id = docker.createContainer(config).id
-    println("Starting container " + id)
-    docker.startContainer(id)
-    id
-  }
-  val ip = docker.inspectContainer(containerId).networkSettings.ipAddress
-
-  def close() {
-    try {
-      println("Killing container " + containerId)
-      docker.killContainer(containerId)
-      println("Removing container " + containerId)
-      docker.removeContainer(containerId)
-      println("Closing docker client")
-      DockerClientFactory.close(docker)
-    } catch {
-      case e: Exception =>
-        println(e)
-        println("You may need to clean this up manually.")
-        throw e
-    }
-  }
-}
-
-@Ignore class PostgresIntegration extends FunSuite with BeforeAndAfterAll {
-  lazy val db = new PostgresDatabase()
-
-  def url(ip: String) = s"jdbc:postgresql://$ip:5432/postgres?user=postgres&password=rootpass"
-
-  def waitForDatabase(ip: String, maxMillis: Long) {
-    val before = System.currentTimeMillis()
-    var lastException: java.sql.SQLException = null
-    while (true) {
-      if (System.currentTimeMillis() > before + maxMillis) {
-        throw new java.sql.SQLException(s"Database not up after $maxMillis ms.",
-          lastException)
-      }
-      try {
-        val conn = java.sql.DriverManager.getConnection(url(ip))
-        conn.close()
-        println("Database is up.")
-        return;
-      } catch {
-        case e: java.sql.SQLException =>
-          lastException = e
-          java.lang.Thread.sleep(250)
-      }
-    }
-  }
-
-  def setupDatabase(ip: String) {
-    val conn = DriverManager.getConnection(url(ip))
-    try {
-      conn.prepareStatement("CREATE DATABASE foo").executeUpdate()
-      conn.setCatalog("foo")
-      conn.prepareStatement("CREATE TABLE bar (a text, b integer, c double precision, d bigint, "
-          + "e bit(1), f bit(10), g bytea, h boolean, i inet, j cidr)").executeUpdate()
-      conn.prepareStatement("INSERT INTO bar VALUES ('hello', 42, 1.25, 123456789012345, B'0', "
-          + "B'1000100101', E'\\\\xDEADBEEF', true, '172.16.0.42', '192.168.0.0/16')").executeUpdate()
-    } finally {
-      conn.close()
-    }
-  }
-
-  override def beforeAll() {
-    println("Waiting for database to start up.")
-    waitForDatabase(db.ip, 60000)
-    println("Setting up database.")
-    setupDatabase(db.ip)
-  }
-
-  override def afterAll() {
-    db.close()
-  }
-
-  test("Type mapping for various types") {
-    val df = TestSQLContext.jdbc(url(db.ip), "public.bar")
-    val rows = df.collect()
-    assert(rows.length == 1)
-    val types = rows(0).toSeq.map(x => x.getClass.toString)
-    assert(types.length == 10)
-    assert(types(0).equals("class java.lang.String"))
-    assert(types(1).equals("class java.lang.Integer"))
-    assert(types(2).equals("class java.lang.Double"))
-    assert(types(3).equals("class java.lang.Long"))
-    assert(types(4).equals("class java.lang.Boolean"))
-    assert(types(5).equals("class [B"))
-    assert(types(6).equals("class [B"))
-    assert(types(7).equals("class java.lang.Boolean"))
-    assert(types(8).equals("class java.lang.String"))
-    assert(types(9).equals("class java.lang.String"))
-    assert(rows(0).getString(0).equals("hello"))
-    assert(rows(0).getInt(1) == 42)
-    assert(rows(0).getDouble(2) == 1.25)
-    assert(rows(0).getLong(3) == 123456789012345L)
-    assert(rows(0).getBoolean(4) == false)
-    // BIT(10)'s come back as ASCII strings of ten ASCII 0's and 1's...
-    assert(java.util.Arrays.equals(rows(0).getAs[Array[Byte]](5), Array[Byte](49,48,48,48,49,48,48,49,48,49)))
-    assert(java.util.Arrays.equals(rows(0).getAs[Array[Byte]](6), Array[Byte](0xDE.toByte, 0xAD.toByte, 0xBE.toByte, 0xEF.toByte)))
-    assert(rows(0).getBoolean(7) == true)
-    assert(rows(0).getString(8) == "172.16.0.42")
-    assert(rows(0).getString(9) == "192.168.0.0/16")
-  }
-
-  test("Basic write test") {
-    val df = TestSQLContext.jdbc(url(db.ip), "public.bar")
-    df.createJDBCTable(url(db.ip), "public.barcopy", false)
-    // Test only that it doesn't bomb out.
-  }
-}
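
For anyone porting these suites to the external project, the docker-client 
dependency can be avoided entirely by provisioning the database outside the 
JVM (for example, `docker run` in a wrapper script) and handing its address 
to the test. A minimal sketch, where the system property name is an invented 
example:

    // The container is started externally, so neither docker-client nor
    // Guava 17.0 is needed on the test classpath.
    val ip = sys.props.getOrElse("spark.test.postgres.ip", "127.0.0.1")
    val url = s"jdbc:postgresql://$ip:5432/postgres?user=postgres&password=rootpass"
    val df = TestSQLContext.jdbc(url, "public.bar")
    assert(df.collect().length == 1)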

