[1/2] spark git commit: [SPARK-5166][SPARK-5247][SPARK-5258][SQL] API Cleanup / Documentation

2015-02-17 Thread marmbrus
Repository: spark
Updated Branches:
  refs/heads/master c76da36c2 -> c74b07fa9


http://git-wip-us.apache.org/repos/asf/spark/blob/c74b07fa/sql/core/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegration.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegration.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegration.scala
index 89920f2..4f38110 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegration.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegration.scala
@@ -143,7 +143,7 @@ class MySQLDatabase {
   }
 
   test("Basic test") {
-    val rdd = TestSQLContext.jdbcRDD(url(ip, "foo"), "tbl")
+    val rdd = TestSQLContext.jdbc(url(ip, "foo"), "tbl")
     val rows = rdd.collect
     assert(rows.length == 2)
     val types = rows(0).toSeq.map(x => x.getClass.toString)
@@ -153,7 +153,7 @@ class MySQLDatabase {
   }
 
   test("Numeric types") {
-    val rdd = TestSQLContext.jdbcRDD(url(ip, "foo"), "numbers")
+    val rdd = TestSQLContext.jdbc(url(ip, "foo"), "numbers")
     val rows = rdd.collect
     assert(rows.length == 1)
     val types = rows(0).toSeq.map(x => x.getClass.toString)
@@ -181,7 +181,7 @@ class MySQLDatabase {
   }
 
   test("Date types") {
-    val rdd = TestSQLContext.jdbcRDD(url(ip, "foo"), "dates")
+    val rdd = TestSQLContext.jdbc(url(ip, "foo"), "dates")
     val rows = rdd.collect
     assert(rows.length == 1)
     val types = rows(0).toSeq.map(x => x.getClass.toString)
@@ -199,7 +199,7 @@ class MySQLDatabase {
   }
 
   test("String types") {
-    val rdd = TestSQLContext.jdbcRDD(url(ip, "foo"), "strings")
+    val rdd = TestSQLContext.jdbc(url(ip, "foo"), "strings")
     val rows = rdd.collect
     assert(rows.length == 1)
     val types = rows(0).toSeq.map(x => x.getClass.toString)
@@ -225,9 +225,9 @@ class MySQLDatabase {
   }
 
   test("Basic write test") {
-    val rdd1 = TestSQLContext.jdbcRDD(url(ip, "foo"), "numbers")
-    val rdd2 = TestSQLContext.jdbcRDD(url(ip, "foo"), "dates")
-    val rdd3 = TestSQLContext.jdbcRDD(url(ip, "foo"), "strings")
+    val rdd1 = TestSQLContext.jdbc(url(ip, "foo"), "numbers")
+    val rdd2 = TestSQLContext.jdbc(url(ip, "foo"), "dates")
+    val rdd3 = TestSQLContext.jdbc(url(ip, "foo"), "strings")
     rdd1.createJDBCTable(url(ip, "foo"), "numberscopy", false)
     rdd2.createJDBCTable(url(ip, "foo"), "datescopy", false)
     rdd3.createJDBCTable(url(ip, "foo"), "stringscopy", false)
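
For context on the rename in these hunks: in Spark 1.3 the SQLContext.jdbcRDD reader was renamed to SQLContext.jdbc, which returns a DataFrame backed by the remote table. A minimal sketch of the new read path, assuming a reachable MySQL instance; the connection URL and table name below are placeholders:

    import org.apache.spark.sql.test.TestSQLContext

    // Hypothetical JDBC URL; any table reachable over JDBC works the same way.
    val url = "jdbc:mysql://192.168.0.42:3306/foo?user=root&password=rootpass"

    // New API: load the remote table as a DataFrame (Spark 1.3).
    val df = TestSQLContext.jdbc(url, "tbl")
    df.collect().foreach(println)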

http://git-wip-us.apache.org/repos/asf/spark/blob/c74b07fa/sql/core/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegration.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegration.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegration.scala
index c174d7a..7b47fee 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegration.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegration.scala
@@ -113,7 +113,7 @@ class PostgresDatabase {
   }
 
   test("Type mapping for various types") {
-    val rdd = TestSQLContext.jdbcRDD(url(db.ip), "public.bar")
+    val rdd = TestSQLContext.jdbc(url(db.ip), "public.bar")
     val rows = rdd.collect
     assert(rows.length == 1)
     val types = rows(0).toSeq.map(x => x.getClass.toString)
@@ -142,7 +142,7 @@ class PostgresDatabase {
   }
 
   test("Basic write test") {
-    val rdd = TestSQLContext.jdbcRDD(url(db.ip), "public.bar")
+    val rdd = TestSQLContext.jdbc(url(db.ip), "public.bar")
     rdd.createJDBCTable(url(db.ip), "public.barcopy", false)
     // Test only that it doesn't bomb out.
   }
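
The write side exercised here is DataFrame.createJDBCTable(url, table, allowExisting), which creates the target table over JDBC and inserts the DataFrame's rows; passing false for allowExisting means a pre-existing table is not silently replaced. A rough round-trip sketch under the same assumptions (placeholder URL and table names):

    import org.apache.spark.sql.test.TestSQLContext

    // Hypothetical Postgres connection string.
    val url = "jdbc:postgresql://192.168.0.42/postgres?user=postgres"

    // Read an existing table, then copy it to a new table over JDBC.
    val df = TestSQLContext.jdbc(url, "public.bar")
    df.createJDBCTable(url, "public.barcopy", false)  // allowExisting = false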

http://git-wip-us.apache.org/repos/asf/spark/blob/c74b07fa/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/DescribeHiveTableCommand.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/DescribeHiveTableCommand.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/DescribeHiveTableCommand.scala
index bfacc51..07b5a84 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/DescribeHiveTableCommand.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/DescribeHiveTableCommand.scala
@@ -29,9 +29,9 @@ import org.apache.spark.sql.hive.HiveShim
 import org.apache.spark.sql.SQLContext
 
 /**
- * Implementation for describe [extended] table.
- *
  * :: DeveloperApi ::
+ *
+ * Implementation for describe [extended] table.
  */
 @DeveloperApi
 case class DescribeHiveTableCommand(
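
The Scaladoc reorder above matches the convention used elsewhere in Spark: the :: DeveloperApi :: marker leads the comment so that generated docs flag the API's stability before the description. The resulting shape, shown on a hypothetical class for illustration:

    import org.apache.spark.annotation.DeveloperApi

    /**
     * :: DeveloperApi ::
     *
     * One-line description of what the class does.
     */
    @DeveloperApi
    case class ExampleCommand(table: String)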

http://git-wip-us.apache.org/repos/asf/spark/blob/c74b07fa/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala

[1/2] spark git commit: [SPARK-5166][SPARK-5247][SPARK-5258][SQL] API Cleanup / Documentation

2015-02-17 Thread marmbrus
Repository: spark
Updated Branches:
  refs/heads/branch-1.3 97cb568a2 -> cd3d41587