Repository: spark
Updated Branches:
  refs/heads/master ee1c1f3a0 -> 61b427d4b


http://git-wip-us.apache.org/repos/asf/spark/blob/61b427d4/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaApplySchemaSuite.java
----------------------------------------------------------------------
diff --git a/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaApplySchemaSuite.java b/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaApplySchemaSuite.java
index de586ba..86d21f4 100644
--- a/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaApplySchemaSuite.java
+++ b/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaApplySchemaSuite.java
@@ -18,7 +18,6 @@
 package org.apache.spark.sql.api.java;
 
 import java.io.Serializable;
-import java.math.BigDecimal;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -31,6 +30,7 @@ import org.junit.Test;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.api.java.function.Function;
+import org.apache.spark.sql.*;
 import org.apache.spark.sql.types.*;
 
 // The test suite itself is Serializable so that anonymous Function implementations can be
@@ -38,12 +38,12 @@ import org.apache.spark.sql.types.*;
 // see http://stackoverflow.com/questions/758570/.
 public class JavaApplySchemaSuite implements Serializable {
   private transient JavaSparkContext javaCtx;
-  private transient JavaSQLContext javaSqlCtx;
+  private transient SQLContext javaSqlCtx;
 
   @Before
   public void setUp() {
     javaCtx = new JavaSparkContext("local", "JavaApplySchemaSuite");
-    javaSqlCtx = new JavaSQLContext(javaCtx);
+    javaSqlCtx = new SQLContext(javaCtx);
   }
 
   @After
@@ -89,7 +89,7 @@ public class JavaApplySchemaSuite implements Serializable {
     JavaRDD<Row> rowRDD = javaCtx.parallelize(personList).map(
       new Function<Person, Row>() {
         public Row call(Person person) throws Exception {
-          return Row.create(person.getName(), person.getAge());
+          return RowFactory.create(person.getName(), person.getAge());
         }
       });
 
@@ -98,15 +98,15 @@ public class JavaApplySchemaSuite implements Serializable {
     fields.add(DataTypes.createStructField("age", DataTypes.IntegerType, 
false));
     StructType schema = DataTypes.createStructType(fields);
 
-    JavaSchemaRDD schemaRDD = javaSqlCtx.applySchema(rowRDD, schema);
+    SchemaRDD schemaRDD = javaSqlCtx.applySchema(rowRDD.rdd(), schema);
     schemaRDD.registerTempTable("people");
-    List<Row> actual = javaSqlCtx.sql("SELECT * FROM people").collect();
+    Row[] actual = javaSqlCtx.sql("SELECT * FROM people").collect();
 
     List<Row> expected = new ArrayList<Row>(2);
-    expected.add(Row.create("Michael", 29));
-    expected.add(Row.create("Yin", 28));
+    expected.add(RowFactory.create("Michael", 29));
+    expected.add(RowFactory.create("Yin", 28));
 
-    Assert.assertEquals(expected, actual);
+    Assert.assertEquals(expected, Arrays.asList(actual));
   }
 
   @Test
@@ -129,8 +129,8 @@ public class JavaApplySchemaSuite implements Serializable {
     StructType expectedSchema = DataTypes.createStructType(fields);
     List<Row> expectedResult = new ArrayList<Row>(2);
     expectedResult.add(
-      Row.create(
-        new BigDecimal("92233720368547758070"),
+      RowFactory.create(
+        scala.math.BigDecimal$.MODULE$.apply("92233720368547758070"),
         true,
         1.7976931348623157E308,
         10,
@@ -138,8 +138,8 @@ public class JavaApplySchemaSuite implements Serializable {
         null,
         "this is a simple string."));
     expectedResult.add(
-      Row.create(
-        new BigDecimal("92233720368547758069"),
+      RowFactory.create(
+        scala.math.BigDecimal$.MODULE$.apply("92233720368547758069"),
         false,
         1.7976931348623157E305,
         11,
@@ -147,18 +147,18 @@ public class JavaApplySchemaSuite implements Serializable {
         null,
         "this is another simple string."));
 
-    JavaSchemaRDD schemaRDD1 = javaSqlCtx.jsonRDD(jsonRDD);
+    SchemaRDD schemaRDD1 = javaSqlCtx.jsonRDD(jsonRDD.rdd());
     StructType actualSchema1 = schemaRDD1.schema();
     Assert.assertEquals(expectedSchema, actualSchema1);
     schemaRDD1.registerTempTable("jsonTable1");
-    List<Row> actual1 = javaSqlCtx.sql("select * from jsonTable1").collect();
+    List<Row> actual1 = javaSqlCtx.sql("select * from jsonTable1").collectAsList();
     Assert.assertEquals(expectedResult, actual1);
 
-    JavaSchemaRDD schemaRDD2 = javaSqlCtx.jsonRDD(jsonRDD, expectedSchema);
+    SchemaRDD schemaRDD2 = javaSqlCtx.jsonRDD(jsonRDD.rdd(), expectedSchema);
     StructType actualSchema2 = schemaRDD2.schema();
     Assert.assertEquals(expectedSchema, actualSchema2);
     schemaRDD2.registerTempTable("jsonTable2");
-    List<Row> actual2 = javaSqlCtx.sql("select * from jsonTable2").collect();
+    List<Row> actual2 = javaSqlCtx.sql("select * from jsonTable2").collectAsList();
     Assert.assertEquals(expectedResult, actual2);
   }
 }
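
The net effect for this suite: JavaSQLContext becomes the unified SQLContext, JavaSchemaRDD becomes SchemaRDD, and Row.create moves to RowFactory.create. A minimal Java sketch of the migrated usage, built only from the calls visible in the hunks above (class, app, and data names are illustrative):

    import java.util.Arrays;
    import java.util.List;

    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.RowFactory;
    import org.apache.spark.sql.SQLContext;
    import org.apache.spark.sql.SchemaRDD;
    import org.apache.spark.sql.types.DataTypes;
    import org.apache.spark.sql.types.StructField;
    import org.apache.spark.sql.types.StructType;

    public class ApplySchemaSketch {                  // illustrative name
      public static void main(String[] args) {
        JavaSparkContext sc = new JavaSparkContext("local", "sketch");
        SQLContext sqlCtx = new SQLContext(sc);       // was: new JavaSQLContext(sc)

        JavaRDD<Row> rows = sc.parallelize(Arrays.asList(
            RowFactory.create("Michael", 29),         // was: Row.create(...)
            RowFactory.create("Yin", 28)));

        List<StructField> fields = Arrays.asList(
            DataTypes.createStructField("name", DataTypes.StringType, false),
            DataTypes.createStructField("age", DataTypes.IntegerType, false));
        StructType schema = DataTypes.createStructType(fields);

        // applySchema now takes a Scala RDD, hence the .rdd() unwrap
        SchemaRDD people = sqlCtx.applySchema(rows.rdd(), schema);
        people.registerTempTable("people");

        // collect() now returns Row[]; collectAsList() gives a List<Row>
        List<Row> result = sqlCtx.sql("SELECT * FROM people").collectAsList();
        System.out.println(result);
        sc.stop();
      }
    }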

http://git-wip-us.apache.org/repos/asf/spark/blob/61b427d4/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaRowSuite.java
----------------------------------------------------------------------
diff --git a/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaRowSuite.java b/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaRowSuite.java
index 2b58121..fbfcd3f 100644
--- a/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaRowSuite.java
+++ b/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaRowSuite.java
@@ -29,6 +29,9 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+import org.apache.spark.sql.Row;
+import org.apache.spark.sql.RowFactory;
+
 public class JavaRowSuite {
   private byte byteValue;
   private short shortValue;
@@ -61,7 +64,7 @@ public class JavaRowSuite {
 
   @Test
   public void constructSimpleRow() {
-    Row simpleRow = Row.create(
+    Row simpleRow = RowFactory.create(
       byteValue,                 // ByteType
       new Byte(byteValue),
       shortValue,                // ShortType
@@ -137,7 +140,7 @@ public class JavaRowSuite {
     simpleMap.put(stringValue + " (3)", longValue - 2);
 
     // Simple struct
-    Row simpleStruct = Row.create(
+    Row simpleStruct = RowFactory.create(
       doubleValue, stringValue, timestampValue, null);
 
     // Complex array
@@ -150,7 +153,7 @@ public class JavaRowSuite {
     complexMap.put(arrayOfRows, simpleStruct);
 
     // Complex struct
-    Row complexStruct = Row.create(
+    Row complexStruct = RowFactory.create(
       simpleStringArray,
       simpleMap,
       simpleStruct,
@@ -167,7 +170,7 @@ public class JavaRowSuite {
     Assert.assertEquals(null, complexStruct.get(6));
 
     // A very complex row
-    Row complexRow = Row.create(arrayOfMaps, arrayOfRows, complexMap, complexStruct);
+    Row complexRow = RowFactory.create(arrayOfMaps, arrayOfRows, complexMap, complexStruct);
     Assert.assertEquals(arrayOfMaps, complexRow.get(0));
     Assert.assertEquals(arrayOfRows, complexRow.get(1));
     Assert.assertEquals(complexMap, complexRow.get(2));
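
Same change in this suite: row construction goes through RowFactory, while the positional getters on org.apache.spark.sql.Row are unchanged. A small hedged fragment of the pattern (values illustrative):

    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.RowFactory;

    Row row = RowFactory.create("Michael", 29);  // was: Row.create("Michael", 29)
    String name = row.getString(0);              // type-specific positional getter
    int age = row.getInt(1);
    Object raw = row.get(1);                     // untyped access, as in the asserts above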

http://git-wip-us.apache.org/repos/asf/spark/blob/61b427d4/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaUserDefinedTypeSuite.java
----------------------------------------------------------------------
diff --git a/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaUserDefinedTypeSuite.java b/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaUserDefinedTypeSuite.java
deleted file mode 100644
index 0caa821..0000000
--- a/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaUserDefinedTypeSuite.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql.api.java;
-
-import java.io.Serializable;
-import java.util.*;
-
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import org.apache.spark.api.java.JavaRDD;
-import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.sql.MyDenseVector;
-import org.apache.spark.sql.MyLabeledPoint;
-
-public class JavaUserDefinedTypeSuite implements Serializable {
-  private transient JavaSparkContext javaCtx;
-  private transient JavaSQLContext javaSqlCtx;
-
-  @Before
-  public void setUp() {
-    javaCtx = new JavaSparkContext("local", "JavaUserDefinedTypeSuite");
-    javaSqlCtx = new JavaSQLContext(javaCtx);
-  }
-
-  @After
-  public void tearDown() {
-    javaCtx.stop();
-    javaCtx = null;
-    javaSqlCtx = null;
-  }
-
-  @Test
-  public void useScalaUDT() {
-    List<MyLabeledPoint> points = Arrays.asList(
-        new MyLabeledPoint(1.0, new MyDenseVector(new double[]{0.1, 1.0})),
-        new MyLabeledPoint(0.0, new MyDenseVector(new double[]{0.2, 2.0})));
-    JavaRDD<MyLabeledPoint> pointsRDD = javaCtx.parallelize(points);
-
-    JavaSchemaRDD schemaRDD = javaSqlCtx.applySchema(pointsRDD, MyLabeledPoint.class);
-    schemaRDD.registerTempTable("points");
-
-    List<Row> actualLabelRows = javaSqlCtx.sql("SELECT label FROM points").collect();
-    List<Double> actualLabels = new LinkedList<Double>();
-    for (Row r : actualLabelRows) {
-      actualLabels.add(r.getDouble(0));
-    }
-    for (MyLabeledPoint lp : points) {
-      Assert.assertTrue(actualLabels.contains(lp.label()));
-    }
-
-    List<Row> actualFeatureRows = javaSqlCtx.sql("SELECT features FROM points").collect();
-    List<MyDenseVector> actualFeatures = new LinkedList<MyDenseVector>();
-    for (Row r : actualFeatureRows) {
-      actualFeatures.add((MyDenseVector)r.get(0));
-    }
-    for (MyLabeledPoint lp : points) {
-      Assert.assertTrue(actualFeatures.contains(lp.features()));
-    }
-
-    List<Row> actual = javaSqlCtx.sql("SELECT label, features FROM points").collect();
-    List<MyLabeledPoint> actualPoints =
-        new LinkedList<MyLabeledPoint>();
-    for (Row r : actual) {
-      actualPoints.add(new MyLabeledPoint(r.getDouble(0), (MyDenseVector)r.get(1)));
-    }
-    for (MyLabeledPoint lp : points) {
-      Assert.assertTrue(actualPoints.contains(lp));
-    }
-  }
-}
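
The deleted UDT suite was built entirely on the removed JavaSQLContext/JavaSchemaRDD classes. A hypothetical migration against the unified SQLContext, assuming it retains a bean/UDT-aware applySchema(JavaRDD<?>, Class<?>) overload like the old Java API's (this diff does not show such a call site, so treat the overload as an assumption):

    SQLContext sqlCtx = new SQLContext(javaCtx);
    JavaRDD<MyLabeledPoint> pointsRDD = javaCtx.parallelize(points);

    // assumption: a Class-based applySchema overload on the unified context
    SchemaRDD schemaRDD = sqlCtx.applySchema(pointsRDD, MyLabeledPoint.class);
    schemaRDD.registerTempTable("points");
    List<Row> labels = sqlCtx.sql("SELECT label FROM points").collectAsList();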

http://git-wip-us.apache.org/repos/asf/spark/blob/61b427d4/sql/core/src/test/scala/org/apache/spark/sql/api/java/JavaSQLSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/api/java/JavaSQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/api/java/JavaSQLSuite.scala
deleted file mode 100644
index fdbb428..0000000
--- a/sql/core/src/test/scala/org/apache/spark/sql/api/java/JavaSQLSuite.scala
+++ /dev/null
@@ -1,209 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql.api.java
-
-import scala.beans.BeanProperty
-
-import org.scalatest.FunSuite
-
-import org.apache.spark.api.java.JavaSparkContext
-import org.apache.spark.sql.catalyst.util._
-import org.apache.spark.sql.test.TestSQLContext
-import org.apache.spark.sql.types.NullType
-
-// Implicits
-import scala.collection.JavaConversions._
-
-class PersonBean extends Serializable {
-  @BeanProperty
-  var name: String = _
-
-  @BeanProperty
-  var age: Int = _
-}
-
-class AllTypesBean extends Serializable {
-  @BeanProperty var stringField: String = _
-  @BeanProperty var intField: java.lang.Integer = _
-  @BeanProperty var longField: java.lang.Long = _
-  @BeanProperty var floatField: java.lang.Float = _
-  @BeanProperty var doubleField: java.lang.Double = _
-  @BeanProperty var shortField: java.lang.Short = _
-  @BeanProperty var byteField: java.lang.Byte = _
-  @BeanProperty var booleanField: java.lang.Boolean = _
-  @BeanProperty var dateField: java.sql.Date = _
-  @BeanProperty var timestampField: java.sql.Timestamp = _
-  @BeanProperty var bigDecimalField: java.math.BigDecimal = _
-}
-
-class JavaSQLSuite extends FunSuite {
-  val javaCtx = new JavaSparkContext(TestSQLContext.sparkContext)
-  val javaSqlCtx = new JavaSQLContext(javaCtx)
-
-  test("schema from JavaBeans") {
-    val person = new PersonBean
-    person.setName("Michael")
-    person.setAge(29)
-
-    val rdd = javaCtx.parallelize(person :: Nil)
-    val schemaRDD = javaSqlCtx.applySchema(rdd, classOf[PersonBean])
-
-    schemaRDD.registerTempTable("people")
-    javaSqlCtx.sql("SELECT * FROM people").collect()
-  }
-
-  test("schema with null from JavaBeans") {
-    val person = new PersonBean
-    person.setName("Michael")
-    person.setAge(29)
-
-    val rdd = javaCtx.parallelize(person :: Nil)
-    val schemaRDD = javaSqlCtx.applySchema(rdd, classOf[PersonBean])
-
-    schemaRDD.registerTempTable("people")
-    val nullRDD = javaSqlCtx.sql("SELECT null FROM people")
-    val structFields = nullRDD.schema.fields
-    assert(structFields.size == 1)
-    assert(structFields(0).dataType === NullType)
-    assert(nullRDD.collect().head.row === Seq(null))
-  }
-
-  test("all types in JavaBeans") {
-    val bean = new AllTypesBean
-    bean.setStringField("")
-    bean.setIntField(0)
-    bean.setLongField(0)
-    bean.setFloatField(0.0F)
-    bean.setDoubleField(0.0)
-    bean.setShortField(0.toShort)
-    bean.setByteField(0.toByte)
-    bean.setBooleanField(false)
-    bean.setDateField(java.sql.Date.valueOf("2014-10-10"))
-    bean.setTimestampField(java.sql.Timestamp.valueOf("2014-10-10 00:00:00.0"))
-    bean.setBigDecimalField(new java.math.BigDecimal(0))
-
-    val rdd = javaCtx.parallelize(bean :: Nil)
-    val schemaRDD = javaSqlCtx.applySchema(rdd, classOf[AllTypesBean])
-    schemaRDD.registerTempTable("allTypes")
-
-    assert(
-      javaSqlCtx.sql(
-        """
-          |SELECT stringField, intField, longField, floatField, doubleField, shortField, byteField,
-          |       booleanField, dateField, timestampField, bigDecimalField
-          |FROM allTypes
-        """.stripMargin).collect.head.row ===
-      Seq("", 0, 0L, 0F, 0.0, 0.toShort, 0.toByte, false, 
java.sql.Date.valueOf("2014-10-10"),
-        java.sql.Timestamp.valueOf("2014-10-10 00:00:00.0"), 
scala.math.BigDecimal(0)))
-  }
-
-  test("decimal types in JavaBeans") {
-    val bean = new AllTypesBean
-    bean.setStringField("")
-    bean.setIntField(0)
-    bean.setLongField(0)
-    bean.setFloatField(0.0F)
-    bean.setDoubleField(0.0)
-    bean.setShortField(0.toShort)
-    bean.setByteField(0.toByte)
-    bean.setBooleanField(false)
-    bean.setDateField(java.sql.Date.valueOf("2014-10-10"))
-    bean.setTimestampField(java.sql.Timestamp.valueOf("2014-10-10 00:00:00.0"))
-    bean.setBigDecimalField(new java.math.BigDecimal(0))
-
-    val rdd = javaCtx.parallelize(bean :: Nil)
-    val schemaRDD = javaSqlCtx.applySchema(rdd, classOf[AllTypesBean])
-    schemaRDD.registerTempTable("decimalTypes")
-
-    assert(javaSqlCtx.sql(
-      "select bigDecimalField + bigDecimalField from decimalTypes"
-    ).collect.head.row === Seq(scala.math.BigDecimal(0)))
-  }
-
-  test("all types null in JavaBeans") {
-    val bean = new AllTypesBean
-    bean.setStringField(null)
-    bean.setIntField(null)
-    bean.setLongField(null)
-    bean.setFloatField(null)
-    bean.setDoubleField(null)
-    bean.setShortField(null)
-    bean.setByteField(null)
-    bean.setBooleanField(null)
-    bean.setDateField(null)
-    bean.setTimestampField(null)
-    bean.setBigDecimalField(null)
-
-    val rdd = javaCtx.parallelize(bean :: Nil)
-    val schemaRDD = javaSqlCtx.applySchema(rdd, classOf[AllTypesBean])
-    schemaRDD.registerTempTable("allTypes")
-
-    assert(
-      javaSqlCtx.sql(
-        """
-          |SELECT stringField, intField, longField, floatField, doubleField, shortField, byteField,
-          |       booleanField, dateField, timestampField, bigDecimalField
-          |FROM allTypes
-        """.stripMargin).collect.head.row ===
-        Seq.fill(11)(null))
-  }
-
-  test("loads JSON datasets") {
-    val jsonString =
-      """{"string":"this is a simple string.",
-          "integer":10,
-          "long":21474836470,
-          "bigInteger":92233720368547758070,
-          "double":1.7976931348623157E308,
-          "boolean":true,
-          "null":null
-      }""".replaceAll("\n", " ")
-    val rdd = javaCtx.parallelize(jsonString :: Nil)
-
-    var schemaRDD = javaSqlCtx.jsonRDD(rdd)
-
-    schemaRDD.registerTempTable("jsonTable1")
-
-    assert(
-      javaSqlCtx.sql("select * from jsonTable1").collect.head.row ===
-        Seq(BigDecimal("92233720368547758070"),
-            true,
-            1.7976931348623157E308,
-            10,
-            21474836470L,
-            null,
-            "this is a simple string."))
-
-    val file = getTempFilePath("json")
-    val path = file.toString
-    rdd.saveAsTextFile(path)
-    schemaRDD = javaSqlCtx.jsonFile(path)
-
-    schemaRDD.registerTempTable("jsonTable2")
-
-    assert(
-      javaSqlCtx.sql("select * from jsonTable2").collect.head.row ===
-        Seq(BigDecimal("92233720368547758070"),
-            true,
-            1.7976931348623157E308,
-            10,
-            21474836470L,
-            null,
-            "this is a simple string."))
-  }
-}
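
The JSON coverage this deleted Scala suite provided now lives in the updated JavaApplySchemaSuite above. For reference, the unified-API shape of its JSON test in Java (the jsonRDD/.rdd() pattern is taken from the hunks above; the jsonFile path is illustrative):

    JavaRDD<String> json = sc.parallelize(Arrays.asList(
        "{\"name\":\"Michael\",\"age\":29}"));

    // jsonRDD takes a Scala RDD, hence the .rdd() unwrap
    SchemaRDD fromRdd = sqlCtx.jsonRDD(json.rdd());
    fromRdd.registerTempTable("jsonTable");
    List<Row> rows = sqlCtx.sql("SELECT * FROM jsonTable").collectAsList();

    SchemaRDD fromFile = sqlCtx.jsonFile("/tmp/people.json");  // illustrative path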

http://git-wip-us.apache.org/repos/asf/spark/blob/61b427d4/sql/hive/src/main/scala/org/apache/spark/sql/hive/api/java/JavaHiveContext.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/api/java/JavaHiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/api/java/JavaHiveContext.scala
deleted file mode 100644
index 038f63f..0000000
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/api/java/JavaHiveContext.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql.hive.api.java
-
-import org.apache.spark.api.java.JavaSparkContext
-import org.apache.spark.sql.api.java.{JavaSQLContext, JavaSchemaRDD}
-import org.apache.spark.sql.SQLContext
-import org.apache.spark.sql.hive.{HiveContext, HiveQl}
-
-/**
- * The entry point for executing Spark SQL queries from a Java program.
- */
-class JavaHiveContext(sqlContext: SQLContext) extends JavaSQLContext(sqlContext) {
-
-  def this(sparkContext: JavaSparkContext) = this(new HiveContext(sparkContext))
-
-  override def sql(sqlText: String): JavaSchemaRDD = {
-    // TODO: Create a framework for registering parsers instead of just hardcoding if statements.
-    if (sqlContext.conf.dialect == "sql") {
-      super.sql(sqlText)
-    } else if (sqlContext.conf.dialect == "hiveql") {
-      new JavaSchemaRDD(sqlContext, HiveQl.parseSql(sqlText))
-    }  else {
-      sys.error(s"Unsupported SQL dialect: ${sqlContext.conf.dialect}.  Try 'sql' or 'hiveql'")
-    }
-  }
-
-  /**
-    * DEPRECATED: Use sql(...) Instead
-    */
-  @Deprecated
-  def hql(hqlQuery: String): JavaSchemaRDD =
-    new JavaSchemaRDD(sqlContext, HiveQl.parseSql(hqlQuery))
-}
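
With JavaHiveContext removed, Java callers are expected to construct HiveContext directly; the dialect dispatch the override above hardcoded now happens inside the unified sql(). A sketch under that assumption (the replacement call sites are not part of this diff):

    import org.apache.spark.api.java.JavaSparkContext;
    import org.apache.spark.sql.hive.HiveContext;

    JavaSparkContext jsc = new JavaSparkContext("local", "hive-sketch");
    // assumption: pass the underlying SparkContext via jsc.sc()
    HiveContext hiveCtx = new HiveContext(jsc.sc());
    hiveCtx.sql("SHOW TABLES").collectAsList();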

http://git-wip-us.apache.org/repos/asf/spark/blob/61b427d4/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala
deleted file mode 100644
index ca78dfb..0000000
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql.hive.api.java
-
-import scala.util.Try
-
-import org.scalatest.FunSuite
-
-import org.apache.spark.api.java.JavaSparkContext
-import org.apache.spark.sql.api.java.{JavaSQLContext, JavaSchemaRDD}
-import org.apache.spark.sql.execution.ExplainCommand
-import org.apache.spark.sql.hive.test.TestHive
-
-// Implicits
-import scala.collection.JavaConversions._
-
-class JavaHiveQLSuite extends FunSuite {
-  lazy val javaCtx = new JavaSparkContext(TestHive.sparkContext)
-
-  // There is a little trickery here to avoid instantiating two HiveContexts in the same JVM
-  lazy val javaHiveCtx = new JavaHiveContext(TestHive)
-
-  test("SELECT * FROM src") {
-    assert(
-      javaHiveCtx.sql("SELECT * FROM src").collect().map(_.getInt(0)) ===
-        TestHive.sql("SELECT * FROM src").collect().map(_.getInt(0)).toSeq)
-  }
-
-  def isExplanation(result: JavaSchemaRDD) = {
-    val explanation = result.collect().map(_.getString(0))
-    explanation.size > 1 && explanation.head.startsWith("== Physical Plan ==")
-  }
-
-  test("Query Hive native command execution result") {
-    val tableName = "test_native_commands"
-
-    assertResult(0) {
-      javaHiveCtx.sql(s"DROP TABLE IF EXISTS $tableName").count()
-    }
-
-    assertResult(0) {
-      javaHiveCtx.sql(s"CREATE TABLE $tableName(key INT, value 
STRING)").count()
-    }
-
-    assert(
-      javaHiveCtx
-        .sql("SHOW TABLES")
-        .collect()
-        .map(_.getString(0))
-        .contains(tableName))
-
-    assertResult(Array(Array("key", "int"), Array("value", "string"))) {
-      javaHiveCtx
-        .sql(s"describe $tableName")
-        .collect()
-        .map(row => Array(row.get(0).asInstanceOf[String], row.get(1).asInstanceOf[String]))
-        .toArray
-    }
-
-    assert(isExplanation(javaHiveCtx.sql(
-      s"EXPLAIN SELECT key, COUNT(*) FROM $tableName GROUP BY key")))
-
-    TestHive.reset()
-  }
-
-  test("Exactly once semantics for DDL and command statements") {
-    val tableName = "test_exactly_once"
-    val q0 = javaHiveCtx.sql(s"CREATE TABLE $tableName(key INT, value STRING)")
-
-    // If the table was not created, the following assertion would fail
-    assert(Try(TestHive.table(tableName)).isSuccess)
-
-    // If the CREATE TABLE command got executed again, the following assertion would fail
-    assert(Try(q0.count()).isSuccess)
-  }
-}
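
One invariant this deleted suite pinned down is worth keeping in mind when porting it: DDL and command statements execute exactly once, when the query is created, so operating on the returned result must not re-run them. Sketched against the unified API (context construction as in the previous sketch; names illustrative):

    // CREATE TABLE runs eagerly; count() on the result must not run it again
    SchemaRDD q = hiveCtx.sql("CREATE TABLE test_exactly_once(key INT, value STRING)");
    long n = q.count();  // per the deleted test, this is safe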

