Repository: spark
Updated Branches:
  refs/heads/master 4c0d827cf -> 104430223


[SPARK-14589][SQL] Enhance DB2 JDBC Dialect docker tests

## What changes were proposed in this pull request?

Enhance the DB2 JDBC Dialect docker tests as they seemed to have had some 
issues on previous merge causing some tests to fail.

## How was this patch tested?

By running the integration tests locally.

Author: Luciano Resende <lrese...@apache.org>

Closes #12348 from lresende/SPARK-14589.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/10443022
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/10443022
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/10443022

Branch: refs/heads/master
Commit: 104430223eb62a7946f939fbf97242c636adbebe
Parents: 4c0d827
Author: Luciano Resende <lrese...@apache.org>
Authored: Thu May 5 10:54:48 2016 +0100
Committer: Sean Owen <so...@cloudera.com>
Committed: Thu May 5 10:54:48 2016 +0100

----------------------------------------------------------------------
 .../spark/sql/jdbc/DB2IntegrationSuite.scala    | 89 ++++++++------------
 project/SparkBuild.scala                        |  2 +-
 2 files changed, 34 insertions(+), 57 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/10443022/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2IntegrationSuite.scala
----------------------------------------------------------------------
diff --git 
a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2IntegrationSuite.scala
 
b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2IntegrationSuite.scala
index 4fe1ef6..3da34b1 100644
--- 
a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2IntegrationSuite.scala
+++ 
b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2IntegrationSuite.scala
@@ -34,10 +34,10 @@ class DB2IntegrationSuite extends 
DockerJDBCIntegrationSuite {
       "DB2INST1_PASSWORD" -> "rootpass",
       "LICENSE" -> "accept"
     )
-    override val usesIpc = true
+    override val usesIpc = false
     override val jdbcPort: Int = 50000
     override def getJdbcUrl(ip: String, port: Int): String =
-      s"jdbc:db2://$ip:$port/foo:user=db2inst1;password=rootpass;"
+      
s"jdbc:db2://$ip:$port/foo:user=db2inst1;password=rootpass;retrieveMessagesFromServerOnGetMessage=true;"
 //scalastyle:ignore
     override def getStartupProcessName: Option[String] = Some("db2start")
   }
 
@@ -46,22 +46,19 @@ class DB2IntegrationSuite extends 
DockerJDBCIntegrationSuite {
     conn.prepareStatement("INSERT INTO tbl VALUES (42,'fred')").executeUpdate()
     conn.prepareStatement("INSERT INTO tbl VALUES (17,'dave')").executeUpdate()
 
-    conn.prepareStatement("CREATE TABLE numbers (onebit BIT(1), tenbits 
BIT(10), "
-      + "small SMALLINT, med MEDIUMINT, nor INT, big BIGINT, deci 
DECIMAL(40,20), flt FLOAT, "
-      + "dbl DOUBLE)").executeUpdate()
-    conn.prepareStatement("INSERT INTO numbers VALUES (b'0', b'1000100101', "
-      + "17, 77777, 123456789, 123456789012345, 
123456789012345.123456789012345, "
-      + "42.75, 1.0000000000000002)").executeUpdate()
+    conn.prepareStatement("CREATE TABLE numbers ( small SMALLINT, med INTEGER, 
big BIGINT, "
+      + "deci DECIMAL(31,20), flt FLOAT, dbl DOUBLE)").executeUpdate()
+    conn.prepareStatement("INSERT INTO numbers VALUES (17, 77777, 
922337203685477580, "
+      + "123456745.56789012345000000000, 42.75, 5.4E-70)").executeUpdate()
 
-    conn.prepareStatement("CREATE TABLE dates (d DATE, t TIME, dt DATETIME, ts 
TIMESTAMP, "
-      + "yr YEAR)").executeUpdate()
+    conn.prepareStatement("CREATE TABLE dates (d DATE, t TIME, ts TIMESTAMP 
)").executeUpdate()
     conn.prepareStatement("INSERT INTO dates VALUES ('1991-11-09', '13:31:24', 
"
-      + "'1996-01-01 01:23:45', '2009-02-13 23:31:30', 
'2001')").executeUpdate()
+      + "'2009-02-13 23:31:30')").executeUpdate()
 
     // TODO: Test locale conversion for strings.
-    conn.prepareStatement("CREATE TABLE strings (a CHAR(10), b VARCHAR(10), c 
CLOB, d BLOB, "
-      + "e CHAR FOR BIT DATA)").executeUpdate()
-    conn.prepareStatement("INSERT INTO strings VALUES ('the', 'quick', 
'brown', 'fox', 'jumps'")
+    conn.prepareStatement("CREATE TABLE strings (a CHAR(10), b VARCHAR(10), c 
CLOB, d BLOB)")
+      .executeUpdate()
+    conn.prepareStatement("INSERT INTO strings VALUES ('the', 'quick', 
'brown', BLOB('fox'))")
       .executeUpdate()
   }
 
@@ -80,26 +77,20 @@ class DB2IntegrationSuite extends 
DockerJDBCIntegrationSuite {
     val rows = df.collect()
     assert(rows.length == 1)
     val types = rows(0).toSeq.map(x => x.getClass.toString)
-    assert(types.length == 9)
-    assert(types(0).equals("class java.lang.Boolean"))
-    assert(types(1).equals("class java.lang.Long"))
-    assert(types(2).equals("class java.lang.Integer"))
-    assert(types(3).equals("class java.lang.Integer"))
-    assert(types(4).equals("class java.lang.Integer"))
-    assert(types(5).equals("class java.lang.Long"))
-    assert(types(6).equals("class java.math.BigDecimal"))
-    assert(types(7).equals("class java.lang.Double"))
-    assert(types(8).equals("class java.lang.Double"))
-    assert(rows(0).getBoolean(0) == false)
-    assert(rows(0).getLong(1) == 0x225)
-    assert(rows(0).getInt(2) == 17)
-    assert(rows(0).getInt(3) == 77777)
-    assert(rows(0).getInt(4) == 123456789)
-    assert(rows(0).getLong(5) == 123456789012345L)
-    val bd = new BigDecimal("123456789012345.12345678901234500000")
-    assert(rows(0).getAs[BigDecimal](6).equals(bd))
-    assert(rows(0).getDouble(7) == 42.75)
-    assert(rows(0).getDouble(8) == 1.0000000000000002)
+    assert(types.length == 6)
+    assert(types(0).equals("class java.lang.Integer"))
+    assert(types(1).equals("class java.lang.Integer"))
+    assert(types(2).equals("class java.lang.Long"))
+    assert(types(3).equals("class java.math.BigDecimal"))
+    assert(types(4).equals("class java.lang.Double"))
+    assert(types(5).equals("class java.lang.Double"))
+    assert(rows(0).getInt(0) == 17)
+    assert(rows(0).getInt(1) == 77777)
+    assert(rows(0).getLong(2) == 922337203685477580L)
+    val bd = new BigDecimal("123456745.56789012345000000000")
+    assert(rows(0).getAs[BigDecimal](3).equals(bd))
+    assert(rows(0).getDouble(4) == 42.75)
+    assert(rows(0).getDouble(5) == 5.4E-70)
   }
 
   test("Date types") {
@@ -107,17 +98,13 @@ class DB2IntegrationSuite extends 
DockerJDBCIntegrationSuite {
     val rows = df.collect()
     assert(rows.length == 1)
     val types = rows(0).toSeq.map(x => x.getClass.toString)
-    assert(types.length == 5)
+    assert(types.length == 3)
     assert(types(0).equals("class java.sql.Date"))
     assert(types(1).equals("class java.sql.Timestamp"))
     assert(types(2).equals("class java.sql.Timestamp"))
-    assert(types(3).equals("class java.sql.Timestamp"))
-    assert(types(4).equals("class java.sql.Date"))
     assert(rows(0).getAs[Date](0).equals(Date.valueOf("1991-11-09")))
     assert(rows(0).getAs[Timestamp](1).equals(Timestamp.valueOf("1970-01-01 
13:31:24")))
-    assert(rows(0).getAs[Timestamp](2).equals(Timestamp.valueOf("1996-01-01 
01:23:45")))
-    assert(rows(0).getAs[Timestamp](3).equals(Timestamp.valueOf("2009-02-13 
23:31:30")))
-    assert(rows(0).getAs[Date](4).equals(Date.valueOf("2001-01-01")))
+    assert(rows(0).getAs[Timestamp](2).equals(Timestamp.valueOf("2009-02-13 
23:31:30")))
   }
 
   test("String types") {
@@ -125,32 +112,22 @@ class DB2IntegrationSuite extends 
DockerJDBCIntegrationSuite {
     val rows = df.collect()
     assert(rows.length == 1)
     val types = rows(0).toSeq.map(x => x.getClass.toString)
-    assert(types.length == 9)
+    assert(types.length == 4)
     assert(types(0).equals("class java.lang.String"))
     assert(types(1).equals("class java.lang.String"))
     assert(types(2).equals("class java.lang.String"))
-    assert(types(3).equals("class java.lang.String"))
-    assert(types(4).equals("class java.lang.String"))
-    assert(types(5).equals("class java.lang.String"))
-    assert(types(6).equals("class [B"))
-    assert(types(7).equals("class [B"))
-    assert(types(8).equals("class [B"))
-    assert(rows(0).getString(0).equals("the"))
+    assert(types(3).equals("class [B"))
+    assert(rows(0).getString(0).equals("the       "))
     assert(rows(0).getString(1).equals("quick"))
     assert(rows(0).getString(2).equals("brown"))
-    assert(rows(0).getString(3).equals("fox"))
-    assert(rows(0).getString(4).equals("jumps"))
-    assert(rows(0).getString(5).equals("over"))
-    assert(java.util.Arrays.equals(rows(0).getAs[Array[Byte]](6), 
Array[Byte](116, 104, 101, 0)))
-    assert(java.util.Arrays.equals(rows(0).getAs[Array[Byte]](7), 
Array[Byte](108, 97, 122, 121)))
-    assert(java.util.Arrays.equals(rows(0).getAs[Array[Byte]](8), 
Array[Byte](100, 111, 103)))
+    assert(java.util.Arrays.equals(rows(0).getAs[Array[Byte]](3), 
Array[Byte](102, 111, 120)))
   }
 
   test("Basic write test") {
-    val df1 = sqlContext.read.jdbc(jdbcUrl, "numbers", new Properties)
+    // val df1 = sqlContext.read.jdbc(jdbcUrl, "numbers", new Properties)
     val df2 = sqlContext.read.jdbc(jdbcUrl, "dates", new Properties)
     val df3 = sqlContext.read.jdbc(jdbcUrl, "strings", new Properties)
-    df1.write.jdbc(jdbcUrl, "numberscopy", new Properties)
+    // df1.write.jdbc(jdbcUrl, "numberscopy", new Properties)
     df2.write.jdbc(jdbcUrl, "datescopy", new Properties)
     df3.write.jdbc(jdbcUrl, "stringscopy", new Properties)
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/10443022/project/SparkBuild.scala
----------------------------------------------------------------------
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index f7781e0..9249248 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -453,7 +453,7 @@ object DockerIntegrationTests {
   // This serves to override the override specified in DependencyOverrides:
   lazy val settings = Seq(
     dependencyOverrides += "com.google.guava" % "guava" % "18.0",
-    resolvers ++= Seq("DB2" at 
"https://app.camunda.com/nexus/content/repositories/public/";),
+    resolvers += "DB2" at 
"https://app.camunda.com/nexus/content/repositories/public/";,
     libraryDependencies += "com.oracle" % "ojdbc6" % "11.2.0.1.0" from 
"https://app.camunda.com/nexus/content/repositories/public/com/oracle/ojdbc6/11.2.0.1.0/ojdbc6-11.2.0.1.0.jar";
 // scalastyle:ignore
   )
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to