http://git-wip-us.apache.org/repos/asf/spark/blob/6bd12e81/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index 110f51a..567d7fa 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -20,15 +20,18 @@ package org.apache.spark.sql.hive
 import java.io.File
 import java.net.{URL, URLClassLoader}
 import java.sql.Timestamp
+import java.util.concurrent.TimeUnit
 
 import scala.collection.JavaConversions._
 import scala.collection.mutable.HashMap
 import scala.language.implicitConversions
+import scala.concurrent.duration._
 
 import org.apache.hadoop.fs.{FileSystem, Path}
 import org.apache.hadoop.hive.common.StatsSetupConst
 import org.apache.hadoop.hive.common.`type`.HiveDecimal
 import org.apache.hadoop.hive.conf.HiveConf
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars
 import org.apache.hadoop.hive.ql.metadata.Table
 import org.apache.hadoop.hive.ql.parse.VariableSubstitution
 import org.apache.hadoop.hive.ql.session.SessionState
@@ -165,6 +168,16 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) with Logging {
   SessionState.setCurrentSessionState(executionHive.state)
 
   /**
+   * Overrides default Hive configurations to avoid breaking changes to Spark SQL users.
+   *  - allow SQL11 keywords to be used as identifiers
+   */
+  private[sql] def defaultOverides() = {
+    setConf(ConfVars.HIVE_SUPPORT_SQL11_RESERVED_KEYWORDS.varname, "false")
+  }
+
+  defaultOverides()
+
+  /**
    * The copy of the Hive client that is used to retrieve metadata from the Hive MetaStore.
    * The version of the Hive client that is used here must match the metastore that is configured
    * in the hive-site.xml file.
@@ -252,6 +265,10 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) with Logging {
   }
 
   protected[sql] override def parseSql(sql: String): LogicalPlan = {
+    var state = SessionState.get()
+    if (state == null) {
+      SessionState.setCurrentSessionState(tlSession.get().asInstanceOf[SQLSession].sessionState)
+    }
     super.parseSql(substitutor.substitute(hiveconf, sql))
   }
 
@@ -298,10 +315,21 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) with Logging {
         // Can we use fs.getContentSummary in future?
         // Seems fs.getContentSummary returns wrong table size on Jenkins. So we use
         // countFileSize to count the table size.
+        val stagingDir = metadataHive.getConf(HiveConf.ConfVars.STAGINGDIR.varname,
+          HiveConf.ConfVars.STAGINGDIR.defaultStrVal)
+
         def calculateTableSize(fs: FileSystem, path: Path): Long = {
           val fileStatus = fs.getFileStatus(path)
           val size = if (fileStatus.isDir) {
-            fs.listStatus(path).map(status => calculateTableSize(fs, status.getPath)).sum
+            fs.listStatus(path)
+              .map { status =>
+                if (!status.getPath().getName().startsWith(stagingDir)) {
+                  calculateTableSize(fs, status.getPath)
+                } else {
+                  0L
+                }
+              }
+              .sum
           } else {
             fileStatus.getLen
           }
@@ -398,7 +426,58 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) with Logging {
   }
 
   /** Overridden by child classes that need to set configuration before the client init. */
-  protected def configure(): Map[String, String] = Map.empty
+  protected def configure(): Map[String, String] = {
+    // Hive 0.14.0 introduces timeout operations in HiveConf, and changes default values of a bunch
+    // of time `ConfVar`s by adding time suffixes (`s`, `ms`, and `d` etc.).  This breaks backwards-
+    // compatibility when users are trying to connect to a Hive metastore of lower version,
+    // because these options are expected to be integral values in lower versions of Hive.
+    //
+    // Here we enumerate all time `ConfVar`s and convert their values to numeric strings according
+    // to their output time units.
+    Seq(
+      ConfVars.METASTORE_CLIENT_CONNECT_RETRY_DELAY -> TimeUnit.SECONDS,
+      ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT -> TimeUnit.SECONDS,
+      ConfVars.METASTORE_CLIENT_SOCKET_LIFETIME -> TimeUnit.SECONDS,
+      ConfVars.HMSHANDLERINTERVAL -> TimeUnit.MILLISECONDS,
+      ConfVars.METASTORE_EVENT_DB_LISTENER_TTL -> TimeUnit.SECONDS,
+      ConfVars.METASTORE_EVENT_CLEAN_FREQ -> TimeUnit.SECONDS,
+      ConfVars.METASTORE_EVENT_EXPIRY_DURATION -> TimeUnit.SECONDS,
+      ConfVars.METASTORE_AGGREGATE_STATS_CACHE_TTL -> TimeUnit.SECONDS,
+      ConfVars.METASTORE_AGGREGATE_STATS_CACHE_MAX_WRITER_WAIT -> TimeUnit.MILLISECONDS,
+      ConfVars.METASTORE_AGGREGATE_STATS_CACHE_MAX_READER_WAIT -> TimeUnit.MILLISECONDS,
+      ConfVars.HIVES_AUTO_PROGRESS_TIMEOUT -> TimeUnit.SECONDS,
+      ConfVars.HIVE_LOG_INCREMENTAL_PLAN_PROGRESS_INTERVAL -> TimeUnit.MILLISECONDS,
+      ConfVars.HIVE_STATS_JDBC_TIMEOUT -> TimeUnit.SECONDS,
+      ConfVars.HIVE_STATS_RETRIES_WAIT -> TimeUnit.MILLISECONDS,
+      ConfVars.HIVE_LOCK_SLEEP_BETWEEN_RETRIES -> TimeUnit.SECONDS,
+      ConfVars.HIVE_ZOOKEEPER_SESSION_TIMEOUT -> TimeUnit.MILLISECONDS,
+      ConfVars.HIVE_ZOOKEEPER_CONNECTION_BASESLEEPTIME -> TimeUnit.MILLISECONDS,
+      ConfVars.HIVE_TXN_TIMEOUT -> TimeUnit.SECONDS,
+      ConfVars.HIVE_COMPACTOR_WORKER_TIMEOUT -> TimeUnit.SECONDS,
+      ConfVars.HIVE_COMPACTOR_CHECK_INTERVAL -> TimeUnit.SECONDS,
+      ConfVars.HIVE_COMPACTOR_CLEANER_RUN_INTERVAL -> TimeUnit.MILLISECONDS,
+      ConfVars.HIVE_SERVER2_THRIFT_HTTP_MAX_IDLE_TIME -> TimeUnit.MILLISECONDS,
+      ConfVars.HIVE_SERVER2_THRIFT_HTTP_WORKER_KEEPALIVE_TIME -> TimeUnit.SECONDS,
+      ConfVars.HIVE_SERVER2_THRIFT_HTTP_COOKIE_MAX_AGE -> TimeUnit.SECONDS,
+      ConfVars.HIVE_SERVER2_THRIFT_LOGIN_BEBACKOFF_SLOT_LENGTH -> TimeUnit.MILLISECONDS,
+      ConfVars.HIVE_SERVER2_THRIFT_LOGIN_TIMEOUT -> TimeUnit.SECONDS,
+      ConfVars.HIVE_SERVER2_THRIFT_WORKER_KEEPALIVE_TIME -> TimeUnit.SECONDS,
+      ConfVars.HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT -> TimeUnit.SECONDS,
+      ConfVars.HIVE_SERVER2_ASYNC_EXEC_KEEPALIVE_TIME -> TimeUnit.SECONDS,
+      ConfVars.HIVE_SERVER2_LONG_POLLING_TIMEOUT -> TimeUnit.MILLISECONDS,
+      ConfVars.HIVE_SERVER2_SESSION_CHECK_INTERVAL -> TimeUnit.MILLISECONDS,
+      ConfVars.HIVE_SERVER2_IDLE_SESSION_TIMEOUT -> TimeUnit.MILLISECONDS,
+      ConfVars.HIVE_SERVER2_IDLE_OPERATION_TIMEOUT -> TimeUnit.MILLISECONDS,
+      ConfVars.SERVER_READ_SOCKET_TIMEOUT -> TimeUnit.SECONDS,
+      ConfVars.HIVE_LOCALIZE_RESOURCE_WAIT_INTERVAL -> TimeUnit.MILLISECONDS,
+      ConfVars.SPARK_CLIENT_FUTURE_TIMEOUT -> TimeUnit.SECONDS,
+      ConfVars.SPARK_JOB_MONITOR_TIMEOUT -> TimeUnit.SECONDS,
+      ConfVars.SPARK_RPC_CLIENT_CONNECT_TIMEOUT -> TimeUnit.MILLISECONDS,
+      ConfVars.SPARK_RPC_CLIENT_HANDSHAKE_TIMEOUT -> TimeUnit.MILLISECONDS
+    ).map { case (confVar, unit) =>
+      confVar.varname -> hiveconf.getTimeVar(confVar, unit).toString
+    }.toMap
+  }
 
   protected[hive] class SQLSession extends super.SQLSession {
     protected[sql] override lazy val conf: SQLConf = new SQLConf {
@@ -515,19 +594,23 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) with Logging {
 
 private[hive] object HiveContext {
   /** The version of hive used internally by Spark SQL. */
-  val hiveExecutionVersion: String = "0.13.1"
+  val hiveExecutionVersion: String = "1.2.1"
 
   val HIVE_METASTORE_VERSION: String = "spark.sql.hive.metastore.version"
   val HIVE_METASTORE_JARS = stringConf("spark.sql.hive.metastore.jars",
     defaultValue = Some("builtin"),
-    doc = "Location of the jars that should be used to instantiate the 
HiveMetastoreClient. This" +
-      " property can be one of three options: " +
-      "1. \"builtin\" Use Hive 0.13.1, which is bundled with the Spark 
assembly jar when " +
-      "<code>-Phive</code> is enabled. When this option is chosen, " +
-      "spark.sql.hive.metastore.version must be either <code>0.13.1</code> or 
not defined. " +
-      "2. \"maven\" Use Hive jars of specified version downloaded from Maven 
repositories." +
-      "3. A classpath in the standard format for both Hive and Hadoop.")
-
+    doc = s"""
+      | Location of the jars that should be used to instantiate the HiveMetastoreClient.
+      | This property can be one of three options: "
+      | 1. "builtin"
+      |   Use Hive ${hiveExecutionVersion}, which is bundled with the Spark assembly jar when
+      |   <code>-Phive</code> is enabled. When this option is chosen,
+      |   <code>spark.sql.hive.metastore.version</code> must be either
+      |   <code>${hiveExecutionVersion}</code> or not defined.
+      | 2. "maven"
+      |   Use Hive jars of specified version downloaded from Maven repositories.
+      | 3. A classpath in the standard format for both Hive and Hadoop.
+    """.stripMargin)
   val CONVERT_METASTORE_PARQUET = booleanConf("spark.sql.hive.convertMetastoreParquet",
     defaultValue = Some(true),
     doc = "When set to false, Spark SQL will use the Hive SerDe for parquet 
tables instead of " +
@@ -566,17 +649,18 @@ private[hive] object HiveContext {
   /** Constructs a configuration for hive, where the metastore is located in a temp directory. */
   def newTemporaryConfiguration(): Map[String, String] = {
     val tempDir = Utils.createTempDir()
-    val localMetastore = new File(tempDir, "metastore").getAbsolutePath
+    val localMetastore = new File(tempDir, "metastore")
     val propMap: HashMap[String, String] = HashMap()
     // We have to mask all properties in hive-site.xml that relates to metastore data source
     // as we used a local metastore here.
     HiveConf.ConfVars.values().foreach { confvar =>
       if (confvar.varname.contains("datanucleus") || confvar.varname.contains("jdo")) {
-        propMap.put(confvar.varname, confvar.defaultVal)
+        propMap.put(confvar.varname, confvar.getDefaultExpr())
       }
     }
-    propMap.put("javax.jdo.option.ConnectionURL",
-      s"jdbc:derby:;databaseName=$localMetastore;create=true")
+    propMap.put(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, localMetastore.toURI.toString)
+    propMap.put(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname,
+      s"jdbc:derby:;databaseName=${localMetastore.getAbsolutePath};create=true")
     propMap.put("datanucleus.rdbms.datastoreAdapterClassName",
       "org.datanucleus.store.rdbms.adapter.DerbyAdapter")
     propMap.toMap
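
Note (illustration, not part of the patch): the configure() override added to HiveContext above leans on HiveConf.getTimeVar, introduced in Hive 0.14, to turn a suffixed time value such as "600s" into a plain number before it is handed to an older metastore client. A minimal sketch, assuming a locally constructed HiveConf:

import java.util.concurrent.TimeUnit
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.conf.HiveConf.ConfVars

val conf = new HiveConf()
conf.set(ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT.varname, "600s")
// getTimeVar parses the suffix and converts the value to the requested unit.
val seconds: Long = conf.getTimeVar(ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, TimeUnit.SECONDS)
// configure() then ships the suffix-free string ("600") to the lower-version metastore client.
val normalized = ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT.varname -> seconds.toString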

http://git-wip-us.apache.org/repos/asf/spark/blob/6bd12e81/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index a8c9b4f..16c1866 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -649,11 +649,12 @@ private[hive] case class MetastoreRelation
     table.outputFormat.foreach(sd.setOutputFormat)
 
     val serdeInfo = new org.apache.hadoop.hive.metastore.api.SerDeInfo
-    sd.setSerdeInfo(serdeInfo)
     table.serde.foreach(serdeInfo.setSerializationLib)
+    sd.setSerdeInfo(serdeInfo)
+
     val serdeParameters = new java.util.HashMap[String, String]()
-    serdeInfo.setParameters(serdeParameters)
     table.serdeProperties.foreach { case (k, v) => serdeParameters.put(k, v) }
+    serdeInfo.setParameters(serdeParameters)
 
     new Table(tTable)
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/6bd12e81/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
index e6df64d..e2fdfc6 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.sql.hive
 
 import java.sql.Date
+import java.util.Locale
 
 import org.apache.hadoop.hive.conf.HiveConf
 import org.apache.hadoop.hive.serde.serdeConstants
@@ -80,6 +81,7 @@ private[hive] object HiveQl extends Logging {
     "TOK_ALTERDATABASE_PROPERTIES",
     "TOK_ALTERINDEX_PROPERTIES",
     "TOK_ALTERINDEX_REBUILD",
+    "TOK_ALTERTABLE",
     "TOK_ALTERTABLE_ADDCOLS",
     "TOK_ALTERTABLE_ADDPARTS",
     "TOK_ALTERTABLE_ALTERPARTS",
@@ -94,6 +96,7 @@ private[hive] object HiveQl extends Logging {
     "TOK_ALTERTABLE_SKEWED",
     "TOK_ALTERTABLE_TOUCH",
     "TOK_ALTERTABLE_UNARCHIVE",
+    "TOK_ALTERVIEW",
     "TOK_ALTERVIEW_ADDPARTS",
     "TOK_ALTERVIEW_AS",
     "TOK_ALTERVIEW_DROPPARTS",
@@ -248,7 +251,7 @@ private[hive] object HiveQl extends Logging {
      * Otherwise, there will be Null pointer exception,
      * when retrieving properties form HiveConf.
      */
-    val hContext = new Context(hiveConf)
+    val hContext = new Context(SessionState.get().getConf())
     val node = ParseUtils.findRootNonNullToken((new ParseDriver).parse(sql, hContext))
     hContext.clear()
     node
@@ -577,12 +580,7 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
             "TOK_TABLESKEWED", // Skewed by
             "TOK_TABLEROWFORMAT",
             "TOK_TABLESERIALIZER",
-            "TOK_FILEFORMAT_GENERIC", // For file formats not natively 
supported by Hive.
-            "TOK_TBLSEQUENCEFILE", // Stored as SequenceFile
-            "TOK_TBLTEXTFILE", // Stored as TextFile
-            "TOK_TBLRCFILE", // Stored as RCFile
-            "TOK_TBLORCFILE", // Stored as ORC File
-            "TOK_TBLPARQUETFILE", // Stored as PARQUET
+            "TOK_FILEFORMAT_GENERIC",
             "TOK_TABLEFILEFORMAT", // User-provided InputFormat and 
OutputFormat
             "TOK_STORAGEHANDLER", // Storage handler
             "TOK_TABLELOCATION",
@@ -706,36 +704,51 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
             tableDesc = tableDesc.copy(serdeProperties = tableDesc.serdeProperties ++ serdeParams)
           }
         case Token("TOK_FILEFORMAT_GENERIC", child :: Nil) =>
-          throw new SemanticException(
-            "Unrecognized file format in STORED AS clause:${child.getText}")
+          child.getText().toLowerCase(Locale.ENGLISH) match {
+            case "orc" =>
+              tableDesc = tableDesc.copy(
+                inputFormat = Option("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"),
+                outputFormat = Option("org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"))
+              if (tableDesc.serde.isEmpty) {
+                tableDesc = tableDesc.copy(
+                  serde = Option("org.apache.hadoop.hive.ql.io.orc.OrcSerde"))
+              }
 
-        case Token("TOK_TBLRCFILE", Nil) =>
-          tableDesc = tableDesc.copy(
-            inputFormat = Option("org.apache.hadoop.hive.ql.io.RCFileInputFormat"),
-            outputFormat = Option("org.apache.hadoop.hive.ql.io.RCFileOutputFormat"))
-          if (tableDesc.serde.isEmpty) {
-            tableDesc = tableDesc.copy(
-              serde = Option("org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe"))
-          }
+            case "parquet" =>
+              tableDesc = tableDesc.copy(
+                inputFormat =
+                  Option("org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat"),
+                outputFormat =
+                  Option("org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat"))
+              if (tableDesc.serde.isEmpty) {
+                tableDesc = tableDesc.copy(
+                  serde = Option("org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe"))
+              }
 
-        case Token("TOK_TBLORCFILE", Nil) =>
-          tableDesc = tableDesc.copy(
-            inputFormat = Option("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"),
-            outputFormat = Option("org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"))
-          if (tableDesc.serde.isEmpty) {
-            tableDesc = tableDesc.copy(
-              serde = Option("org.apache.hadoop.hive.ql.io.orc.OrcSerde"))
-          }
+            case "rcfile" =>
+              tableDesc = tableDesc.copy(
+                inputFormat = Option("org.apache.hadoop.hive.ql.io.RCFileInputFormat"),
+                outputFormat = Option("org.apache.hadoop.hive.ql.io.RCFileOutputFormat"))
+              if (tableDesc.serde.isEmpty) {
+                tableDesc = tableDesc.copy(
+                  serde = Option("org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe"))
+              }
 
-        case Token("TOK_TBLPARQUETFILE", Nil) =>
-          tableDesc = tableDesc.copy(
-            inputFormat =
-              Option("org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat"),
-            outputFormat =
-              Option("org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat"))
-          if (tableDesc.serde.isEmpty) {
-            tableDesc = tableDesc.copy(
-              serde = Option("org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe"))
+            case "textfile" =>
+              tableDesc = tableDesc.copy(
+                inputFormat =
+                  Option("org.apache.hadoop.mapred.TextInputFormat"),
+                outputFormat =
+                  Option("org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat"))
+
+            case "sequencefile" =>
+              tableDesc = tableDesc.copy(
+                inputFormat = Option("org.apache.hadoop.mapred.SequenceFileInputFormat"),
+                outputFormat = Option("org.apache.hadoop.mapred.SequenceFileOutputFormat"))
+
+            case _ =>
+              throw new SemanticException(
+                s"Unrecognized file format in STORED AS clause: 
${child.getText}")
           }
 
         case Token("TOK_TABLESERIALIZER",
@@ -751,7 +764,7 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
 
         case Token("TOK_TABLEPROPERTIES", list :: Nil) =>
           tableDesc = tableDesc.copy(properties = tableDesc.properties ++ getProperties(list))
-        case list @ Token("TOK_TABLEFILEFORMAT", _) =>
+        case list @ Token("TOK_TABLEFILEFORMAT", children) =>
           tableDesc = tableDesc.copy(
             inputFormat =
               Option(BaseSemanticAnalyzer.unescapeSQLString(list.getChild(0).getText)),
@@ -889,7 +902,8 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
                 Token("TOK_TABLEPROPLIST", propsClause) :: Nil) :: Nil) :: Nil 
=>
                 val serdeProps = propsClause.map {
                   case Token("TOK_TABLEPROPERTY", Token(name, Nil) :: 
Token(value, Nil) :: Nil) =>
-                    (name, value)
+                    (BaseSemanticAnalyzer.unescapeSQLString(name),
+                      BaseSemanticAnalyzer.unescapeSQLString(value))
                 }
                 (Nil, Some(BaseSemanticAnalyzer.unescapeSQLString(serdeClass)), serdeProps)
 
@@ -1037,10 +1051,11 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
       // return With plan if there is CTE
       cteRelations.map(With(query, _)).getOrElse(query)
 
-    case Token("TOK_UNION", left :: right :: Nil) => Union(nodeToPlan(left), 
nodeToPlan(right))
+    // HIVE-9039 renamed TOK_UNION => TOK_UNIONALL while adding 
TOK_UNIONDISTINCT
+    case Token("TOK_UNIONALL", left :: right :: Nil) => 
Union(nodeToPlan(left), nodeToPlan(right))
 
     case a: ASTNode =>
-      throw new NotImplementedError(s"No parse rules for:\n ${dumpTree(a).toString} ")
+      throw new NotImplementedError(s"No parse rules for $node:\n ${dumpTree(a).toString} ")
   }
 
   val allJoinTokens = "(TOK_.*JOIN)".r
@@ -1251,7 +1266,8 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
       InsertIntoTable(UnresolvedRelation(tableIdent, None), partitionKeys, query, overwrite, true)
 
     case a: ASTNode =>
-      throw new NotImplementedError(s"No parse rules for:\n ${dumpTree(a).toString} ")
+      throw new NotImplementedError(s"No parse rules for ${a.getName}:" +
+          s"\n ${dumpTree(a).toString} ")
   }
 
   protected def selExprNodeToExpr(node: Node): Option[Expression] = node match {
@@ -1274,7 +1290,8 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
     case Token("TOK_HINTLIST", _) => None
 
     case a: ASTNode =>
-      throw new NotImplementedError(s"No parse rules for:\n ${dumpTree(a).toString} ")
+      throw new NotImplementedError(s"No parse rules for ${a.getName }:" +
+          s"\n ${dumpTree(a).toString } ")
   }
 
   protected val escapedIdentifier = "`([^`]+)`".r
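
Note (illustration, not part of the patch): the TOK_FILEFORMAT_GENERIC handling added above amounts to a lookup from the STORED AS keyword to Hive input/output format class names plus an optional default SerDe. A standalone sketch of that mapping, with a hypothetical helper name:

import java.util.Locale

// Returns (inputFormat, outputFormat, defaultSerde) for a STORED AS keyword, or None for
// formats the parser does not recognize (which the patch turns into a SemanticException).
def storageFormatFor(keyword: String): Option[(String, String, Option[String])] =
  keyword.toLowerCase(Locale.ENGLISH) match {
    case "orc" => Some((
      "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat",
      "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat",
      Some("org.apache.hadoop.hive.ql.io.orc.OrcSerde")))
    case "parquet" => Some((
      "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat",
      "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat",
      Some("org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe")))
    case "rcfile" => Some((
      "org.apache.hadoop.hive.ql.io.RCFileInputFormat",
      "org.apache.hadoop.hive.ql.io.RCFileOutputFormat",
      Some("org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe")))
    case "textfile" => Some((
      "org.apache.hadoop.mapred.TextInputFormat",
      "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat",
      None))
    case "sequencefile" => Some((
      "org.apache.hadoop.mapred.SequenceFileInputFormat",
      "org.apache.hadoop.mapred.SequenceFileOutputFormat",
      None))
    case _ => None
  }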

http://git-wip-us.apache.org/repos/asf/spark/blob/6bd12e81/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveShim.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveShim.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveShim.scala
index a357bb3..267074f 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveShim.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveShim.scala
@@ -20,6 +20,8 @@ package org.apache.spark.sql.hive
 import java.io.{InputStream, OutputStream}
 import java.rmi.server.UID
 
+import org.apache.avro.Schema
+
 /* Implicit conversions */
 import scala.collection.JavaConversions._
 import scala.language.implicitConversions
@@ -33,7 +35,7 @@ import org.apache.hadoop.fs.Path
 import org.apache.hadoop.hive.ql.exec.{UDF, Utilities}
 import org.apache.hadoop.hive.ql.plan.{FileSinkDesc, TableDesc}
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils
-import org.apache.hadoop.hive.serde2.avro.AvroGenericRecordWritable
+import org.apache.hadoop.hive.serde2.avro.{AvroGenericRecordWritable, AvroSerdeUtils}
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector
 import org.apache.hadoop.io.Writable
 
@@ -82,10 +84,19 @@ private[hive] object HiveShim {
    * Bug introduced in hive-0.13. AvroGenericRecordWritable has a member recordReaderID that
    * is needed to initialize before serialization.
    */
-  def prepareWritable(w: Writable): Writable = {
+  def prepareWritable(w: Writable, serDeProps: Seq[(String, String)]): Writable = {
     w match {
       case w: AvroGenericRecordWritable =>
         w.setRecordReaderID(new UID())
+        // In Hive 1.1, the record's schema may need to be initialized manually or a NPE will
+        // be thrown.
+        if (w.getFileSchema() == null) {
+          serDeProps
+            .find(_._1 == AvroSerdeUtils.AvroTableProperties.SCHEMA_LITERAL.getPropName())
+            .foreach { kv =>
+              w.setFileSchema(new Schema.Parser().parse(kv._2))
+            }
+        }
       case _ =>
     }
     w
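
Note (illustration, not part of the patch): the prepareWritable change above avoids the Hive 1.1+ NullPointerException by recovering the Avro schema from the table's avro.schema.literal SerDe property when the writable carries none. A minimal sketch of that lookup, with a hypothetical helper name:

import org.apache.avro.Schema

def fileSchemaFrom(serDeProps: Seq[(String, String)]): Option[Schema] =
  serDeProps
    .find(_._1 == "avro.schema.literal") // AvroTableProperties.SCHEMA_LITERAL.getPropName()
    .map { case (_, json) => new Schema.Parser().parse(json) }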

http://git-wip-us.apache.org/repos/asf/spark/blob/6bd12e81/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientInterface.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientInterface.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientInterface.scala
index d834b4e..a82e152 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientInterface.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientInterface.scala
@@ -87,6 +87,10 @@ private[hive] case class HiveTable(
  * shared classes.
  */
 private[hive] trait ClientInterface {
+
+  /** Returns the configuration for the given key in the current session. */
+  def getConf(key: String, defaultValue: String): String
+
   /**
    * Runs a HiveQL command using Hive, returning the results as a list of strings.  Each row will
    * result in one string.

http://git-wip-us.apache.org/repos/asf/spark/blob/6bd12e81/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
index 6e0912d..dc372be 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
@@ -38,7 +38,6 @@ import org.apache.spark.sql.catalyst.expressions.Expression
 import org.apache.spark.sql.execution.QueryExecutionException
 import org.apache.spark.util.{CircularBuffer, Utils}
 
-
 /**
  * A class that wraps the HiveClient and converts its responses to externally visible classes.
  * Note that this class is typically loaded with an internal classloader for each instantiation,
@@ -115,6 +114,10 @@ private[hive] class ClientWrapper(
   /** Returns the configuration for the current session. */
   def conf: HiveConf = SessionState.get().getConf
 
+  override def getConf(key: String, defaultValue: String): String = {
+    conf.get(key, defaultValue)
+  }
+
   // TODO: should be a def?s
   // When we create this val client, the HiveConf of it (conf) is the one associated with state.
   @GuardedBy("this")

http://git-wip-us.apache.org/repos/asf/spark/blob/6bd12e81/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala
index 956997e..6e826ce 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala
@@ -512,7 +512,7 @@ private[client] class Shim_v1_2 extends Shim_v1_1 {
       listBucketingEnabled: Boolean): Unit = {
     loadDynamicPartitionsMethod.invoke(hive, loadPath, tableName, partSpec, replace: JBoolean,
       numDP: JInteger, holdDDLTime: JBoolean, listBucketingEnabled: JBoolean, JBoolean.FALSE,
-      0: JLong)
+      0L: JLong)
   }
 
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/6bd12e81/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala
index 97fb981..f58bc7d 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala
@@ -55,7 +55,7 @@ private[hive] object IsolatedClientLoader {
     case "14" | "0.14" | "0.14.0" => hive.v14
     case "1.0" | "1.0.0" => hive.v1_0
     case "1.1" | "1.1.0" => hive.v1_1
-    case "1.2" | "1.2.0" => hive.v1_2
+    case "1.2" | "1.2.0" | "1.2.1" => hive.v1_2
   }
 
   private def downloadVersion(version: HiveVersion, ivyPath: Option[String]): Seq[URL] = {

http://git-wip-us.apache.org/repos/asf/spark/blob/6bd12e81/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/package.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/package.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/package.scala
index b48082f..0503691 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/package.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/package.scala
@@ -56,7 +56,7 @@ package object client {
         "net.hydromatic:linq4j",
         "net.hydromatic:quidem"))
 
-    case object v1_2 extends HiveVersion("1.2.0",
+    case object v1_2 extends HiveVersion("1.2.1",
       exclusions = Seq("eigenbase:eigenbase-properties",
         "org.apache.curator:*",
         "org.pentaho:pentaho-aggdesigner-algorithm",

http://git-wip-us.apache.org/repos/asf/spark/blob/6bd12e81/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala
index 40a6a32..12c667e 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala
@@ -129,7 +129,7 @@ case class InsertIntoHiveTable(
     // instances within the closure, since Serializer is not serializable while TableDesc is.
     val tableDesc = table.tableDesc
     val tableLocation = table.hiveQlTable.getDataLocation
-    val tmpLocation = hiveContext.getExternalTmpPath(tableLocation.toUri)
+    val tmpLocation = hiveContext.getExternalTmpPath(tableLocation)
     val fileSinkConf = new FileSinkDesc(tmpLocation.toString, tableDesc, false)
     val isCompressed = sc.hiveconf.getBoolean(
       ConfVars.COMPRESSRESULT.varname, ConfVars.COMPRESSRESULT.defaultBoolVal)

http://git-wip-us.apache.org/repos/asf/spark/blob/6bd12e81/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/ScriptTransformation.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/ScriptTransformation.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/ScriptTransformation.scala
index 7e3342c..fbb8640 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/ScriptTransformation.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/ScriptTransformation.scala
@@ -247,7 +247,7 @@ private class ScriptTransformationWriterThread(
         } else {
           val writable = inputSerde.serialize(
             row.asInstanceOf[GenericInternalRow].values, inputSoi)
-          prepareWritable(writable).write(dataOutputStream)
+          prepareWritable(writable, ioschema.outputSerdeProps).write(dataOutputStream)
         }
       }
       outputStream.close()
@@ -345,9 +345,7 @@ case class HiveScriptIOSchema (
 
     val columnTypesNames = columnTypes.map(_.toTypeInfo.getTypeName()).mkString(",")
 
-    var propsMap = serdeProps.map(kv => {
-      (kv._1.split("'")(1), kv._2.split("'")(1))
-    }).toMap + (serdeConstants.LIST_COLUMNS -> columns.mkString(","))
+    var propsMap = serdeProps.toMap + (serdeConstants.LIST_COLUMNS -> columns.mkString(","))
     propsMap = propsMap + (serdeConstants.LIST_COLUMN_TYPES -> columnTypesNames)
 
     val properties = new Properties()
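
Note (illustration, not part of the patch): the quote-splitting workaround removed above is no longer needed because HiveQl now passes SERDEPROPERTIES keys and values through BaseSemanticAnalyzer.unescapeSQLString, so they arrive without the surrounding quotes. For example:

import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer

// The parser hands over the quoted token text; unescapeSQLString strips the quotes
// and resolves escape sequences, e.g. "'field.delim'" becomes "field.delim".
val key = BaseSemanticAnalyzer.unescapeSQLString("'field.delim'")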

http://git-wip-us.apache.org/repos/asf/spark/blob/6bd12e81/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
index abe5c69..8a86a87 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
@@ -249,7 +249,7 @@ private[spark] object ResolveHiveWindowFunction extends Rule[LogicalPlan] {
           // Get the class of this function.
           // In Hive 0.12, there is no windowFunctionInfo.getFunctionClass. So, we use
           // windowFunctionInfo.getfInfo().getFunctionClass for both Hive 0.13 and Hive 0.13.1.
-          val functionClass = windowFunctionInfo.getfInfo().getFunctionClass
+          val functionClass = windowFunctionInfo.getFunctionClass()
           val newChildren =
             // Rank(), DENSE_RANK(), CUME_DIST(), and PERCENT_RANK() do not take explicit
             // input parameters and requires implicit parameters, which

http://git-wip-us.apache.org/repos/asf/spark/blob/6bd12e81/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala
index 8850e06..684ea1d 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala
@@ -171,7 +171,7 @@ private[spark] class SparkHiveDynamicPartitionWriterContainer(
   import SparkHiveDynamicPartitionWriterContainer._
 
   private val defaultPartName = jobConf.get(
-    ConfVars.DEFAULTPARTITIONNAME.varname, ConfVars.DEFAULTPARTITIONNAME.defaultVal)
+    ConfVars.DEFAULTPARTITIONNAME.varname, ConfVars.DEFAULTPARTITIONNAME.defaultStrVal)
 
   @transient private var writers: mutable.HashMap[String, FileSinkOperator.RecordWriter] = _
 

http://git-wip-us.apache.org/repos/asf/spark/blob/6bd12e81/sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFilters.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFilters.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFilters.scala
index ddd5d24..86142e5 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFilters.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFilters.scala
@@ -18,7 +18,7 @@
 package org.apache.spark.sql.hive.orc
 
 import org.apache.hadoop.hive.common.`type`.{HiveChar, HiveDecimal, HiveVarchar}
-import org.apache.hadoop.hive.ql.io.sarg.SearchArgument
+import org.apache.hadoop.hive.ql.io.sarg.{SearchArgumentFactory, SearchArgument}
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument.Builder
 import org.apache.hadoop.hive.serde2.io.DateWritable
 
@@ -33,13 +33,13 @@ import org.apache.spark.sql.sources._
 private[orc] object OrcFilters extends Logging {
   def createFilter(expr: Array[Filter]): Option[SearchArgument] = {
     expr.reduceOption(And).flatMap { conjunction =>
-      val builder = SearchArgument.FACTORY.newBuilder()
+      val builder = SearchArgumentFactory.newBuilder()
       buildSearchArgument(conjunction, builder).map(_.build())
     }
   }
 
   private def buildSearchArgument(expression: Filter, builder: Builder): Option[Builder] = {
-    def newBuilder = SearchArgument.FACTORY.newBuilder()
+    def newBuilder = SearchArgumentFactory.newBuilder()
 
     def isSearchableLiteral(value: Any): Boolean = value match {
       // These are types recognized by the `SearchArgumentImpl.BuilderImpl.boxLiteral()` method.

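Note (illustration, not part of the patch): Hive 1.2's sarg API no longer exposes SearchArgument.FACTORY, so the ORC filter builder is now obtained from SearchArgumentFactory. A hypothetical pushdown for the predicate a = 1 AND b < 5 built with that API:

import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory

val sarg = SearchArgumentFactory.newBuilder()
  .startAnd()
    .equals("a", 1)
    .lessThan("b", 5)
  .end()
  .build()
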
http://git-wip-us.apache.org/repos/asf/spark/blob/6bd12e81/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
index 7bbdef9..8d0bf46 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
@@ -20,29 +20,25 @@ package org.apache.spark.sql.hive.test
 import java.io.File
 import java.util.{Set => JavaSet}
 
-import org.apache.hadoop.hive.conf.HiveConf
+import scala.collection.mutable
+import scala.language.implicitConversions
+
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry
 import org.apache.hadoop.hive.ql.io.avro.{AvroContainerInputFormat, AvroContainerOutputFormat}
-import org.apache.hadoop.hive.ql.metadata.Table
-import org.apache.hadoop.hive.ql.parse.VariableSubstitution
 import org.apache.hadoop.hive.ql.processors._
 import org.apache.hadoop.hive.serde2.`lazy`.LazySimpleSerDe
 import org.apache.hadoop.hive.serde2.avro.AvroSerDe
 
-import org.apache.spark.sql.catalyst.CatalystConf
+import org.apache.spark.sql.SQLConf
 import org.apache.spark.sql.catalyst.analysis._
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.execution.CacheTableCommand
 import org.apache.spark.sql.hive._
 import org.apache.spark.sql.hive.execution.HiveNativeCommand
-import org.apache.spark.sql.SQLConf
 import org.apache.spark.util.Utils
 import org.apache.spark.{SparkConf, SparkContext}
 
-import scala.collection.mutable
-import scala.language.implicitConversions
-
 /* Implicit conversions */
 import scala.collection.JavaConversions._
 
@@ -83,15 +79,25 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
 
   hiveconf.set("hive.plan.serialization.format", "javaXML")
 
-  lazy val warehousePath = Utils.createTempDir()
+  lazy val warehousePath = Utils.createTempDir(namePrefix = "warehouse-")
+
+  lazy val scratchDirPath = {
+    val dir = Utils.createTempDir(namePrefix = "scratch-")
+    dir.delete()
+    dir
+  }
 
   private lazy val temporaryConfig = newTemporaryConfiguration()
 
   /** Sets up the system initially or after a RESET command */
-  protected override def configure(): Map[String, String] =
-    temporaryConfig ++ Map(
-      ConfVars.METASTOREWAREHOUSE.varname -> warehousePath.toString,
-      ConfVars.METASTORE_INTEGER_JDO_PUSHDOWN.varname -> "true")
+  protected override def configure(): Map[String, String] = {
+    super.configure() ++ temporaryConfig ++ Map(
+      ConfVars.METASTOREWAREHOUSE.varname -> warehousePath.toURI.toString,
+      ConfVars.METASTORE_INTEGER_JDO_PUSHDOWN.varname -> "true",
+      ConfVars.SCRATCHDIR.varname -> scratchDirPath.toURI.toString,
+      ConfVars.METASTORE_CLIENT_CONNECT_RETRY_DELAY.varname -> "1"
+    )
+  }
 
   val testTempDir = Utils.createTempDir()
 
@@ -244,7 +250,6 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
     }),
     TestTable("src_thrift", () => {
       import org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer
-      import org.apache.hadoop.hive.serde2.thrift.test.Complex
       import org.apache.hadoop.mapred.{SequenceFileInputFormat, SequenceFileOutputFormat}
       import org.apache.thrift.protocol.TBinaryProtocol
 
@@ -253,7 +258,7 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
          |CREATE TABLE src_thrift(fake INT)
          |ROW FORMAT SERDE '${classOf[ThriftDeserializer].getName}'
          |WITH SERDEPROPERTIES(
-         |  'serialization.class'='${classOf[Complex].getName}',
+         |  'serialization.class'='org.apache.spark.sql.hive.test.Complex',
          |  'serialization.format'='${classOf[TBinaryProtocol].getName}'
          |)
          |STORED AS
@@ -437,6 +442,7 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
         case (k, v) =>
           metadataHive.runSqlHive(s"SET $k=$v")
       }
+      defaultOverides()
 
       runSqlHive("USE default")
 

http://git-wip-us.apache.org/repos/asf/spark/blob/6bd12e81/sql/hive/src/test/java/org/apache/spark/sql/hive/test/Complex.java
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/java/org/apache/spark/sql/hive/test/Complex.java b/sql/hive/src/test/java/org/apache/spark/sql/hive/test/Complex.java
new file mode 100644
index 0000000..e010112
--- /dev/null
+++ b/sql/hive/src/test/java/org/apache/spark/sql/hive/test/Complex.java
@@ -0,0 +1,1139 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql.hive.test;
+
+import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.hadoop.hive.serde2.thrift.test.IntString;
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.EncodingUtils;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+
+/**
+ * This is a fork of Hive 0.13's org/apache/hadoop/hive/serde2/thrift/test/Complex.java, which
+ * does not contain union fields that are not supported by Spark SQL.
+ */
+
+@SuppressWarnings({"ALL", "unchecked"})
+public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields>, java.io.Serializable, Cloneable {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Complex");
+
+  private static final org.apache.thrift.protocol.TField AINT_FIELD_DESC = new org.apache.thrift.protocol.TField("aint", org.apache.thrift.protocol.TType.I32, (short)1);
+  private static final org.apache.thrift.protocol.TField A_STRING_FIELD_DESC = new org.apache.thrift.protocol.TField("aString", org.apache.thrift.protocol.TType.STRING, (short)2);
+  private static final org.apache.thrift.protocol.TField LINT_FIELD_DESC = new org.apache.thrift.protocol.TField("lint", org.apache.thrift.protocol.TType.LIST, (short)3);
+  private static final org.apache.thrift.protocol.TField L_STRING_FIELD_DESC = new org.apache.thrift.protocol.TField("lString", org.apache.thrift.protocol.TType.LIST, (short)4);
+  private static final org.apache.thrift.protocol.TField LINT_STRING_FIELD_DESC = new org.apache.thrift.protocol.TField("lintString", org.apache.thrift.protocol.TType.LIST, (short)5);
+  private static final org.apache.thrift.protocol.TField M_STRING_STRING_FIELD_DESC = new org.apache.thrift.protocol.TField("mStringString", org.apache.thrift.protocol.TType.MAP, (short)6);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new ComplexStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new ComplexTupleSchemeFactory());
+  }
+
+  private int aint; // required
+  private String aString; // required
+  private List<Integer> lint; // required
+  private List<String> lString; // required
+  private List<IntString> lintString; // required
+  private Map<String,String> mStringString; // required
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    AINT((short)1, "aint"),
+    A_STRING((short)2, "aString"),
+    LINT((short)3, "lint"),
+    L_STRING((short)4, "lString"),
+    LINT_STRING((short)5, "lintString"),
+    M_STRING_STRING((short)6, "mStringString");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if its not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // AINT
+          return AINT;
+        case 2: // A_STRING
+          return A_STRING;
+        case 3: // LINT
+          return LINT;
+        case 4: // L_STRING
+          return L_STRING;
+        case 5: // LINT_STRING
+          return LINT_STRING;
+        case 6: // M_STRING_STRING
+          return M_STRING_STRING;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if its not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  private static final int __AINT_ISSET_ID = 0;
+  private byte __isset_bitfield = 0;
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.AINT, new org.apache.thrift.meta_data.FieldMetaData("aint", org.apache.thrift.TFieldRequirementType.DEFAULT,
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
+    tmpMap.put(_Fields.A_STRING, new org.apache.thrift.meta_data.FieldMetaData("aString", org.apache.thrift.TFieldRequirementType.DEFAULT,
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    tmpMap.put(_Fields.LINT, new org.apache.thrift.meta_data.FieldMetaData("lint", org.apache.thrift.TFieldRequirementType.DEFAULT,
+        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))));
+    tmpMap.put(_Fields.L_STRING, new org.apache.thrift.meta_data.FieldMetaData("lString", org.apache.thrift.TFieldRequirementType.DEFAULT,
+        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
+    tmpMap.put(_Fields.LINT_STRING, new org.apache.thrift.meta_data.FieldMetaData("lintString", org.apache.thrift.TFieldRequirementType.DEFAULT,
+        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
+            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, IntString.class))));
+    tmpMap.put(_Fields.M_STRING_STRING, new org.apache.thrift.meta_data.FieldMetaData("mStringString", org.apache.thrift.TFieldRequirementType.DEFAULT,
+        new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Complex.class, metaDataMap);
+  }
+
+  public Complex() {
+  }
+
+  public Complex(
+    int aint,
+    String aString,
+    List<Integer> lint,
+    List<String> lString,
+    List<IntString> lintString,
+    Map<String,String> mStringString)
+  {
+    this();
+    this.aint = aint;
+    setAintIsSet(true);
+    this.aString = aString;
+    this.lint = lint;
+    this.lString = lString;
+    this.lintString = lintString;
+    this.mStringString = mStringString;
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public Complex(Complex other) {
+    __isset_bitfield = other.__isset_bitfield;
+    this.aint = other.aint;
+    if (other.isSetAString()) {
+      this.aString = other.aString;
+    }
+    if (other.isSetLint()) {
+      List<Integer> __this__lint = new ArrayList<Integer>();
+      for (Integer other_element : other.lint) {
+        __this__lint.add(other_element);
+      }
+      this.lint = __this__lint;
+    }
+    if (other.isSetLString()) {
+      List<String> __this__lString = new ArrayList<String>();
+      for (String other_element : other.lString) {
+        __this__lString.add(other_element);
+      }
+      this.lString = __this__lString;
+    }
+    if (other.isSetLintString()) {
+      List<IntString> __this__lintString = new ArrayList<IntString>();
+      for (IntString other_element : other.lintString) {
+        __this__lintString.add(new IntString(other_element));
+      }
+      this.lintString = __this__lintString;
+    }
+    if (other.isSetMStringString()) {
+      Map<String,String> __this__mStringString = new HashMap<String,String>();
+      for (Map.Entry<String, String> other_element : other.mStringString.entrySet()) {
+
+        String other_element_key = other_element.getKey();
+        String other_element_value = other_element.getValue();
+
+        String __this__mStringString_copy_key = other_element_key;
+
+        String __this__mStringString_copy_value = other_element_value;
+
+        __this__mStringString.put(__this__mStringString_copy_key, __this__mStringString_copy_value);
+      }
+      this.mStringString = __this__mStringString;
+    }
+  }
+
+  public Complex deepCopy() {
+    return new Complex(this);
+  }
+
+  @Override
+  public void clear() {
+    setAintIsSet(false);
+    this.aint = 0;
+    this.aString = null;
+    this.lint = null;
+    this.lString = null;
+    this.lintString = null;
+    this.mStringString = null;
+  }
+
+  public int getAint() {
+    return this.aint;
+  }
+
+  public void setAint(int aint) {
+    this.aint = aint;
+    setAintIsSet(true);
+  }
+
+  public void unsetAint() {
+    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __AINT_ISSET_ID);
+  }
+
+  /** Returns true if field aint is set (has been assigned a value) and false otherwise */
+  public boolean isSetAint() {
+    return EncodingUtils.testBit(__isset_bitfield, __AINT_ISSET_ID);
+  }
+
+  public void setAintIsSet(boolean value) {
+    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __AINT_ISSET_ID, value);
+  }
+
+  public String getAString() {
+    return this.aString;
+  }
+
+  public void setAString(String aString) {
+    this.aString = aString;
+  }
+
+  public void unsetAString() {
+    this.aString = null;
+  }
+
+  /** Returns true if field aString is set (has been assigned a value) and false otherwise */
+  public boolean isSetAString() {
+    return this.aString != null;
+  }
+
+  public void setAStringIsSet(boolean value) {
+    if (!value) {
+      this.aString = null;
+    }
+  }
+
+  public int getLintSize() {
+    return (this.lint == null) ? 0 : this.lint.size();
+  }
+
+  public java.util.Iterator<Integer> getLintIterator() {
+    return (this.lint == null) ? null : this.lint.iterator();
+  }
+
+  public void addToLint(int elem) {
+    if (this.lint == null) {
+      this.lint = new ArrayList<>();
+    }
+    this.lint.add(elem);
+  }
+
+  public List<Integer> getLint() {
+    return this.lint;
+  }
+
+  public void setLint(List<Integer> lint) {
+    this.lint = lint;
+  }
+
+  public void unsetLint() {
+    this.lint = null;
+  }
+
+  /** Returns true if field lint is set (has been assigned a value) and false otherwise */
+  public boolean isSetLint() {
+    return this.lint != null;
+  }
+
+  public void setLintIsSet(boolean value) {
+    if (!value) {
+      this.lint = null;
+    }
+  }
+
+  public int getLStringSize() {
+    return (this.lString == null) ? 0 : this.lString.size();
+  }
+
+  public java.util.Iterator<String> getLStringIterator() {
+    return (this.lString == null) ? null : this.lString.iterator();
+  }
+
+  public void addToLString(String elem) {
+    if (this.lString == null) {
+      this.lString = new ArrayList<String>();
+    }
+    this.lString.add(elem);
+  }
+
+  public List<String> getLString() {
+    return this.lString;
+  }
+
+  public void setLString(List<String> lString) {
+    this.lString = lString;
+  }
+
+  public void unsetLString() {
+    this.lString = null;
+  }
+
+  /** Returns true if field lString is set (has been assigned a value) and false otherwise */
+  public boolean isSetLString() {
+    return this.lString != null;
+  }
+
+  public void setLStringIsSet(boolean value) {
+    if (!value) {
+      this.lString = null;
+    }
+  }
+
+  public int getLintStringSize() {
+    return (this.lintString == null) ? 0 : this.lintString.size();
+  }
+
+  public java.util.Iterator<IntString> getLintStringIterator() {
+    return (this.lintString == null) ? null : this.lintString.iterator();
+  }
+
+  public void addToLintString(IntString elem) {
+    if (this.lintString == null) {
+      this.lintString = new ArrayList<>();
+    }
+    this.lintString.add(elem);
+  }
+
+  public List<IntString> getLintString() {
+    return this.lintString;
+  }
+
+  public void setLintString(List<IntString> lintString) {
+    this.lintString = lintString;
+  }
+
+  public void unsetLintString() {
+    this.lintString = null;
+  }
+
+  /** Returns true if field lintString is set (has been assigned a value) and false otherwise */
+  public boolean isSetLintString() {
+    return this.lintString != null;
+  }
+
+  public void setLintStringIsSet(boolean value) {
+    if (!value) {
+      this.lintString = null;
+    }
+  }
+
+  public int getMStringStringSize() {
+    return (this.mStringString == null) ? 0 : this.mStringString.size();
+  }
+
+  public void putToMStringString(String key, String val) {
+    if (this.mStringString == null) {
+      this.mStringString = new HashMap<String,String>();
+    }
+    this.mStringString.put(key, val);
+  }
+
+  public Map<String,String> getMStringString() {
+    return this.mStringString;
+  }
+
+  public void setMStringString(Map<String,String> mStringString) {
+    this.mStringString = mStringString;
+  }
+
+  public void unsetMStringString() {
+    this.mStringString = null;
+  }
+
+  /** Returns true if field mStringString is set (has been assigned a value) and false otherwise */
+  public boolean isSetMStringString() {
+    return this.mStringString != null;
+  }
+
+  public void setMStringStringIsSet(boolean value) {
+    if (!value) {
+      this.mStringString = null;
+    }
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case AINT:
+      if (value == null) {
+        unsetAint();
+      } else {
+        setAint((Integer)value);
+      }
+      break;
+
+    case A_STRING:
+      if (value == null) {
+        unsetAString();
+      } else {
+        setAString((String)value);
+      }
+      break;
+
+    case LINT:
+      if (value == null) {
+        unsetLint();
+      } else {
+        setLint((List<Integer>)value);
+      }
+      break;
+
+    case L_STRING:
+      if (value == null) {
+        unsetLString();
+      } else {
+        setLString((List<String>)value);
+      }
+      break;
+
+    case LINT_STRING:
+      if (value == null) {
+        unsetLintString();
+      } else {
+        setLintString((List<IntString>)value);
+      }
+      break;
+
+    case M_STRING_STRING:
+      if (value == null) {
+        unsetMStringString();
+      } else {
+        setMStringString((Map<String,String>)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case AINT:
+      return Integer.valueOf(getAint());
+
+    case A_STRING:
+      return getAString();
+
+    case LINT:
+      return getLint();
+
+    case L_STRING:
+      return getLString();
+
+    case LINT_STRING:
+      return getLintString();
+
+    case M_STRING_STRING:
+      return getMStringString();
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case AINT:
+      return isSetAint();
+    case A_STRING:
+      return isSetAString();
+    case LINT:
+      return isSetLint();
+    case L_STRING:
+      return isSetLString();
+    case LINT_STRING:
+      return isSetLintString();
+    case M_STRING_STRING:
+      return isSetMStringString();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof Complex)
+      return this.equals((Complex)that);
+    return false;
+  }
+
+  public boolean equals(Complex that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_aint = true;
+    boolean that_present_aint = true;
+    if (this_present_aint || that_present_aint) {
+      if (!(this_present_aint && that_present_aint))
+        return false;
+      if (this.aint != that.aint)
+        return false;
+    }
+
+    boolean this_present_aString = true && this.isSetAString();
+    boolean that_present_aString = true && that.isSetAString();
+    if (this_present_aString || that_present_aString) {
+      if (!(this_present_aString && that_present_aString))
+        return false;
+      if (!this.aString.equals(that.aString))
+        return false;
+    }
+
+    boolean this_present_lint = true && this.isSetLint();
+    boolean that_present_lint = true && that.isSetLint();
+    if (this_present_lint || that_present_lint) {
+      if (!(this_present_lint && that_present_lint))
+        return false;
+      if (!this.lint.equals(that.lint))
+        return false;
+    }
+
+    boolean this_present_lString = true && this.isSetLString();
+    boolean that_present_lString = true && that.isSetLString();
+    if (this_present_lString || that_present_lString) {
+      if (!(this_present_lString && that_present_lString))
+        return false;
+      if (!this.lString.equals(that.lString))
+        return false;
+    }
+
+    boolean this_present_lintString = true && this.isSetLintString();
+    boolean that_present_lintString = true && that.isSetLintString();
+    if (this_present_lintString || that_present_lintString) {
+      if (!(this_present_lintString && that_present_lintString))
+        return false;
+      if (!this.lintString.equals(that.lintString))
+        return false;
+    }
+
+    boolean this_present_mStringString = true && this.isSetMStringString();
+    boolean that_present_mStringString = true && that.isSetMStringString();
+    if (this_present_mStringString || that_present_mStringString) {
+      if (!(this_present_mStringString && that_present_mStringString))
+        return false;
+      if (!this.mStringString.equals(that.mStringString))
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    HashCodeBuilder builder = new HashCodeBuilder();
+
+    boolean present_aint = true;
+    builder.append(present_aint);
+    if (present_aint)
+      builder.append(aint);
+
+    boolean present_aString = true && (isSetAString());
+    builder.append(present_aString);
+    if (present_aString)
+      builder.append(aString);
+
+    boolean present_lint = true && (isSetLint());
+    builder.append(present_lint);
+    if (present_lint)
+      builder.append(lint);
+
+    boolean present_lString = true && (isSetLString());
+    builder.append(present_lString);
+    if (present_lString)
+      builder.append(lString);
+
+    boolean present_lintString = true && (isSetLintString());
+    builder.append(present_lintString);
+    if (present_lintString)
+      builder.append(lintString);
+
+    boolean present_mStringString = true && (isSetMStringString());
+    builder.append(present_mStringString);
+    if (present_mStringString)
+      builder.append(mStringString);
+
+    return builder.toHashCode();
+  }
+
+  public int compareTo(Complex other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+    Complex typedOther = (Complex)other;
+
+    lastComparison = Boolean.valueOf(isSetAint()).compareTo(typedOther.isSetAint());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetAint()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.aint, typedOther.aint);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetAString()).compareTo(typedOther.isSetAString());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetAString()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.aString, typedOther.aString);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetLint()).compareTo(typedOther.isSetLint());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetLint()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.lint, typedOther.lint);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetLString()).compareTo(typedOther.isSetLString());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetLString()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.lString, typedOther.lString);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetLintString()).compareTo(typedOther.isSetLintString());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetLintString()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.lintString, typedOther.lintString);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetMStringString()).compareTo(typedOther.isSetMStringString());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetMStringString()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.mStringString, typedOther.mStringString);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("Complex(");
+    boolean first = true;
+
+    sb.append("aint:");
+    sb.append(this.aint);
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("aString:");
+    if (this.aString == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.aString);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("lint:");
+    if (this.lint == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.lint);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("lString:");
+    if (this.lString == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.lString);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("lintString:");
+    if (this.lintString == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.lintString);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("mStringString:");
+    if (this.mStringString == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.mStringString);
+    }
+    first = false;
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+    // check for sub-struct validity
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+      __isset_bitfield = 0;
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class ComplexStandardSchemeFactory implements SchemeFactory {
+    public ComplexStandardScheme getScheme() {
+      return new ComplexStandardScheme();
+    }
+  }
+
+  private static class ComplexStandardScheme extends StandardScheme<Complex> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, Complex struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // AINT
+            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+              struct.aint = iprot.readI32();
+              struct.setAintIsSet(true);
+            } else {
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 2: // A_STRING
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.aString = iprot.readString();
+              struct.setAStringIsSet(true);
+            } else {
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 3: // LINT
+            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
+              {
+                org.apache.thrift.protocol.TList _list0 = iprot.readListBegin();
+                struct.lint = new ArrayList<Integer>(_list0.size);
+                for (int _i1 = 0; _i1 < _list0.size; ++_i1)
+                {
+                  int _elem2; // required
+                  _elem2 = iprot.readI32();
+                  struct.lint.add(_elem2);
+                }
+                iprot.readListEnd();
+              }
+              struct.setLintIsSet(true);
+            } else {
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 4: // L_STRING
+            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
+              {
+                org.apache.thrift.protocol.TList _list3 = iprot.readListBegin();
+                struct.lString = new ArrayList<String>(_list3.size);
+                for (int _i4 = 0; _i4 < _list3.size; ++_i4)
+                {
+                  String _elem5; // required
+                  _elem5 = iprot.readString();
+                  struct.lString.add(_elem5);
+                }
+                iprot.readListEnd();
+              }
+              struct.setLStringIsSet(true);
+            } else {
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 5: // LINT_STRING
+            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
+              {
+                org.apache.thrift.protocol.TList _list6 = iprot.readListBegin();
+                struct.lintString = new ArrayList<IntString>(_list6.size);
+                for (int _i7 = 0; _i7 < _list6.size; ++_i7)
+                {
+                  IntString _elem8; // required
+                  _elem8 = new IntString();
+                  _elem8.read(iprot);
+                  struct.lintString.add(_elem8);
+                }
+                iprot.readListEnd();
+              }
+              struct.setLintStringIsSet(true);
+            } else {
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 6: // M_STRING_STRING
+            if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
+              {
+                org.apache.thrift.protocol.TMap _map9 = iprot.readMapBegin();
+                struct.mStringString = new HashMap<String,String>(2*_map9.size);
+                for (int _i10 = 0; _i10 < _map9.size; ++_i10)
+                {
+                  String _key11; // required
+                  String _val12; // required
+                  _key11 = iprot.readString();
+                  _val12 = iprot.readString();
+                  struct.mStringString.put(_key11, _val12);
+                }
+                iprot.readMapEnd();
+              }
+              struct.setMStringStringIsSet(true);
+            } else {
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, Complex struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      oprot.writeFieldBegin(AINT_FIELD_DESC);
+      oprot.writeI32(struct.aint);
+      oprot.writeFieldEnd();
+      if (struct.aString != null) {
+        oprot.writeFieldBegin(A_STRING_FIELD_DESC);
+        oprot.writeString(struct.aString);
+        oprot.writeFieldEnd();
+      }
+      if (struct.lint != null) {
+        oprot.writeFieldBegin(LINT_FIELD_DESC);
+        {
+          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I32, struct.lint.size()));
+          for (int _iter13 : struct.lint)
+          {
+            oprot.writeI32(_iter13);
+          }
+          oprot.writeListEnd();
+        }
+        oprot.writeFieldEnd();
+      }
+      if (struct.lString != null) {
+        oprot.writeFieldBegin(L_STRING_FIELD_DESC);
+        {
+          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.lString.size()));
+          for (String _iter14 : struct.lString)
+          {
+            oprot.writeString(_iter14);
+          }
+          oprot.writeListEnd();
+        }
+        oprot.writeFieldEnd();
+      }
+      if (struct.lintString != null) {
+        oprot.writeFieldBegin(LINT_STRING_FIELD_DESC);
+        {
+          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.lintString.size()));
+          for (IntString _iter15 : struct.lintString)
+          {
+            _iter15.write(oprot);
+          }
+          oprot.writeListEnd();
+        }
+        oprot.writeFieldEnd();
+      }
+      if (struct.mStringString != null) {
+        oprot.writeFieldBegin(M_STRING_STRING_FIELD_DESC);
+        {
+          oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, struct.mStringString.size()));
+          for (Map.Entry<String, String> _iter16 : struct.mStringString.entrySet())
+          {
+            oprot.writeString(_iter16.getKey());
+            oprot.writeString(_iter16.getValue());
+          }
+          oprot.writeMapEnd();
+        }
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class ComplexTupleSchemeFactory implements SchemeFactory {
+    public ComplexTupleScheme getScheme() {
+      return new ComplexTupleScheme();
+    }
+  }
+
+  private static class ComplexTupleScheme extends TupleScheme<Complex> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, Complex struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      BitSet optionals = new BitSet();
+      if (struct.isSetAint()) {
+        optionals.set(0);
+      }
+      if (struct.isSetAString()) {
+        optionals.set(1);
+      }
+      if (struct.isSetLint()) {
+        optionals.set(2);
+      }
+      if (struct.isSetLString()) {
+        optionals.set(3);
+      }
+      if (struct.isSetLintString()) {
+        optionals.set(4);
+      }
+      if (struct.isSetMStringString()) {
+        optionals.set(5);
+      }
+      oprot.writeBitSet(optionals, 6);
+      if (struct.isSetAint()) {
+        oprot.writeI32(struct.aint);
+      }
+      if (struct.isSetAString()) {
+        oprot.writeString(struct.aString);
+      }
+      if (struct.isSetLint()) {
+        {
+          oprot.writeI32(struct.lint.size());
+          for (int _iter17 : struct.lint)
+          {
+            oprot.writeI32(_iter17);
+          }
+        }
+      }
+      if (struct.isSetLString()) {
+        {
+          oprot.writeI32(struct.lString.size());
+          for (String _iter18 : struct.lString)
+          {
+            oprot.writeString(_iter18);
+          }
+        }
+      }
+      if (struct.isSetLintString()) {
+        {
+          oprot.writeI32(struct.lintString.size());
+          for (IntString _iter19 : struct.lintString)
+          {
+            _iter19.write(oprot);
+          }
+        }
+      }
+      if (struct.isSetMStringString()) {
+        {
+          oprot.writeI32(struct.mStringString.size());
+          for (Map.Entry<String, String> _iter20 : struct.mStringString.entrySet())
+          {
+            oprot.writeString(_iter20.getKey());
+            oprot.writeString(_iter20.getValue());
+          }
+        }
+      }
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, Complex struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      BitSet incoming = iprot.readBitSet(6);
+      if (incoming.get(0)) {
+        struct.aint = iprot.readI32();
+        struct.setAintIsSet(true);
+      }
+      if (incoming.get(1)) {
+        struct.aString = iprot.readString();
+        struct.setAStringIsSet(true);
+      }
+      if (incoming.get(2)) {
+        {
+          org.apache.thrift.protocol.TList _list21 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I32, iprot.readI32());
+          struct.lint = new ArrayList<Integer>(_list21.size);
+          for (int _i22 = 0; _i22 < _list21.size; ++_i22)
+          {
+            int _elem23; // required
+            _elem23 = iprot.readI32();
+            struct.lint.add(_elem23);
+          }
+        }
+        struct.setLintIsSet(true);
+      }
+      if (incoming.get(3)) {
+        {
+          org.apache.thrift.protocol.TList _list24 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+          struct.lString = new ArrayList<String>(_list24.size);
+          for (int _i25 = 0; _i25 < _list24.size; ++_i25)
+          {
+            String _elem26; // required
+            _elem26 = iprot.readString();
+            struct.lString.add(_elem26);
+          }
+        }
+        struct.setLStringIsSet(true);
+      }
+      if (incoming.get(4)) {
+        {
+          org.apache.thrift.protocol.TList _list27 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+          struct.lintString = new ArrayList<IntString>(_list27.size);
+          for (int _i28 = 0; _i28 < _list27.size; ++_i28)
+          {
+            IntString _elem29; // required
+            _elem29 = new IntString();
+            _elem29.read(iprot);
+            struct.lintString.add(_elem29);
+          }
+        }
+        struct.setLintStringIsSet(true);
+      }
+      if (incoming.get(5)) {
+        {
+          org.apache.thrift.protocol.TMap _map30 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+          struct.mStringString = new HashMap<String,String>(2*_map30.size);
+          for (int _i31 = 0; _i31 < _map30.size; ++_i31)
+          {
+            String _key32; // required
+            String _val33; // required
+            _key32 = iprot.readString();
+            _val33 = iprot.readString();
+            struct.mStringString.put(_key32, _val33);
+          }
+        }
+        struct.setMStringStringIsSet(true);
+      }
+    }
+  }
+
+}
+

http://git-wip-us.apache.org/repos/asf/spark/blob/6bd12e81/sql/hive/src/test/java/test/org/apache/spark/sql/hive/JavaMetastoreDataSourcesSuite.java
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/java/test/org/apache/spark/sql/hive/JavaMetastoreDataSourcesSuite.java b/sql/hive/src/test/java/test/org/apache/spark/sql/hive/JavaMetastoreDataSourcesSuite.java
index 64d1ce9..15c2c3d 100644
--- a/sql/hive/src/test/java/test/org/apache/spark/sql/hive/JavaMetastoreDataSourcesSuite.java
+++ b/sql/hive/src/test/java/test/org/apache/spark/sql/hive/JavaMetastoreDataSourcesSuite.java
@@ -90,8 +90,10 @@ public class JavaMetastoreDataSourcesSuite {
   @After
   public void tearDown() throws IOException {
     // Clean up tables.
-    sqlContext.sql("DROP TABLE IF EXISTS javaSavedTable");
-    sqlContext.sql("DROP TABLE IF EXISTS externalTable");
+    if (sqlContext != null) {
+      sqlContext.sql("DROP TABLE IF EXISTS javaSavedTable");
+      sqlContext.sql("DROP TABLE IF EXISTS externalTable");
+    }
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/spark/blob/6bd12e81/sql/hive/src/test/resources/golden/! operator-0-ee7f6a60a9792041b85b18cda56429bf
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/golden/! operator-0-ee7f6a60a9792041b85b18cda56429bf b/sql/hive/src/test/resources/golden/! operator-0-ee7f6a60a9792041b85b18cda56429bf
new file mode 100644
index 0000000..d00491f
--- /dev/null
+++ b/sql/hive/src/test/resources/golden/! operator-0-ee7f6a60a9792041b85b18cda56429bf
@@ -0,0 +1 @@
+1

http://git-wip-us.apache.org/repos/asf/spark/blob/6bd12e81/sql/hive/src/test/resources/golden/convert_enum_to_string-1-db089ff46f9826c7883198adacdfad59
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/golden/convert_enum_to_string-1-db089ff46f9826c7883198adacdfad59 b/sql/hive/src/test/resources/golden/convert_enum_to_string-1-db089ff46f9826c7883198adacdfad59
index d35bf90..2383bef 100644
--- a/sql/hive/src/test/resources/golden/convert_enum_to_string-1-db089ff46f9826c7883198adacdfad59
+++ b/sql/hive/src/test/resources/golden/convert_enum_to_string-1-db089ff46f9826c7883198adacdfad59
@@ -15,9 +15,9 @@ my_enum_structlist_map        map<string,array<struct<my_string:string,my_enum:string,o
 my_stringlist          array<string>           from deserializer   
 my_structlist          array<struct<my_string:string,my_enum:string,optionals:struct<>>>       from deserializer   
 my_enumlist            array<string>           from deserializer   
-my_stringset           struct<>                from deserializer   
-my_enumset             struct<>                from deserializer   
-my_structset           struct<>                from deserializer   
+my_stringset           array<string>           from deserializer   
+my_enumset             array<string>           from deserializer   
+my_structset           array<struct<my_string:string,my_enum:string,optionals:struct<>>>       from deserializer   
 optionals              struct<>                from deserializer   
 b                      string                                      
                 

http://git-wip-us.apache.org/repos/asf/spark/blob/6bd12e81/sql/hive/src/test/resources/golden/parenthesis_star_by-5-41d474f5e6d7c61c36f74b4bec4e9e44
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/golden/parenthesis_star_by-5-41d474f5e6d7c61c36f74b4bec4e9e44 b/sql/hive/src/test/resources/golden/parenthesis_star_by-5-41d474f5e6d7c61c36f74b4bec4e9e44
new file mode 100644
index 0000000..b70e127e
--- /dev/null
+++ b/sql/hive/src/test/resources/golden/parenthesis_star_by-5-41d474f5e6d7c61c36f74b4bec4e9e44
@@ -0,0 +1,500 @@
+0      val_0
+0      val_0
+0      val_0
+2      val_2
+4      val_4
+5      val_5
+5      val_5
+5      val_5
+8      val_8
+9      val_9
+10     val_10
+11     val_11
+12     val_12
+12     val_12
+15     val_15
+15     val_15
+17     val_17
+18     val_18
+18     val_18
+19     val_19
+20     val_20
+24     val_24
+24     val_24
+26     val_26
+26     val_26
+27     val_27
+28     val_28
+30     val_30
+33     val_33
+34     val_34
+35     val_35
+35     val_35
+35     val_35
+37     val_37
+37     val_37
+41     val_41
+42     val_42
+42     val_42
+43     val_43
+44     val_44
+47     val_47
+51     val_51
+51     val_51
+53     val_53
+54     val_54
+57     val_57
+58     val_58
+58     val_58
+64     val_64
+65     val_65
+66     val_66
+67     val_67
+67     val_67
+69     val_69
+70     val_70
+70     val_70
+70     val_70
+72     val_72
+72     val_72
+74     val_74
+76     val_76
+76     val_76
+77     val_77
+78     val_78
+80     val_80
+82     val_82
+83     val_83
+83     val_83
+84     val_84
+84     val_84
+85     val_85
+86     val_86
+87     val_87
+90     val_90
+90     val_90
+90     val_90
+92     val_92
+95     val_95
+95     val_95
+96     val_96
+97     val_97
+97     val_97
+98     val_98
+98     val_98
+100    val_100
+100    val_100
+103    val_103
+103    val_103
+104    val_104
+104    val_104
+105    val_105
+111    val_111
+113    val_113
+113    val_113
+114    val_114
+116    val_116
+118    val_118
+118    val_118
+119    val_119
+119    val_119
+119    val_119
+120    val_120
+120    val_120
+125    val_125
+125    val_125
+126    val_126
+128    val_128
+128    val_128
+128    val_128
+129    val_129
+129    val_129
+131    val_131
+133    val_133
+134    val_134
+134    val_134
+136    val_136
+137    val_137
+137    val_137
+138    val_138
+138    val_138
+138    val_138
+138    val_138
+143    val_143
+145    val_145
+146    val_146
+146    val_146
+149    val_149
+149    val_149
+150    val_150
+152    val_152
+152    val_152
+153    val_153
+155    val_155
+156    val_156
+157    val_157
+158    val_158
+160    val_160
+162    val_162
+163    val_163
+164    val_164
+164    val_164
+165    val_165
+165    val_165
+166    val_166
+167    val_167
+167    val_167
+167    val_167
+168    val_168
+169    val_169
+169    val_169
+169    val_169
+169    val_169
+170    val_170
+172    val_172
+172    val_172
+174    val_174
+174    val_174
+175    val_175
+175    val_175
+176    val_176
+176    val_176
+177    val_177
+178    val_178
+179    val_179
+179    val_179
+180    val_180
+181    val_181
+183    val_183
+186    val_186
+187    val_187
+187    val_187
+187    val_187
+189    val_189
+190    val_190
+191    val_191
+191    val_191
+192    val_192
+193    val_193
+193    val_193
+193    val_193
+194    val_194
+195    val_195
+195    val_195
+196    val_196
+197    val_197
+197    val_197
+199    val_199
+199    val_199
+199    val_199
+200    val_200
+200    val_200
+201    val_201
+202    val_202
+203    val_203
+203    val_203
+205    val_205
+205    val_205
+207    val_207
+207    val_207
+208    val_208
+208    val_208
+208    val_208
+209    val_209
+209    val_209
+213    val_213
+213    val_213
+214    val_214
+216    val_216
+216    val_216
+217    val_217
+217    val_217
+218    val_218
+219    val_219
+219    val_219
+221    val_221
+221    val_221
+222    val_222
+223    val_223
+223    val_223
+224    val_224
+224    val_224
+226    val_226
+228    val_228
+229    val_229
+229    val_229
+230    val_230
+230    val_230
+230    val_230
+230    val_230
+230    val_230
+233    val_233
+233    val_233
+235    val_235
+237    val_237
+237    val_237
+238    val_238
+238    val_238
+239    val_239
+239    val_239
+241    val_241
+242    val_242
+242    val_242
+244    val_244
+247    val_247
+248    val_248
+249    val_249
+252    val_252
+255    val_255
+255    val_255
+256    val_256
+256    val_256
+257    val_257
+258    val_258
+260    val_260
+262    val_262
+263    val_263
+265    val_265
+265    val_265
+266    val_266
+272    val_272
+272    val_272
+273    val_273
+273    val_273
+273    val_273
+274    val_274
+275    val_275
+277    val_277
+277    val_277
+277    val_277
+277    val_277
+278    val_278
+278    val_278
+280    val_280
+280    val_280
+281    val_281
+281    val_281
+282    val_282
+282    val_282
+283    val_283
+284    val_284
+285    val_285
+286    val_286
+287    val_287
+288    val_288
+288    val_288
+289    val_289
+291    val_291
+292    val_292
+296    val_296
+298    val_298
+298    val_298
+298    val_298
+302    val_302
+305    val_305
+306    val_306
+307    val_307
+307    val_307
+308    val_308
+309    val_309
+309    val_309
+310    val_310
+311    val_311
+311    val_311
+311    val_311
+315    val_315
+316    val_316
+316    val_316
+316    val_316
+317    val_317
+317    val_317
+318    val_318
+318    val_318
+318    val_318
+321    val_321
+321    val_321
+322    val_322
+322    val_322
+323    val_323
+325    val_325
+325    val_325
+327    val_327
+327    val_327
+327    val_327
+331    val_331
+331    val_331
+332    val_332
+333    val_333
+333    val_333
+335    val_335
+336    val_336
+338    val_338
+339    val_339
+341    val_341
+342    val_342
+342    val_342
+344    val_344
+344    val_344
+345    val_345
+348    val_348
+348    val_348
+348    val_348
+348    val_348
+348    val_348
+351    val_351
+353    val_353
+353    val_353
+356    val_356
+360    val_360
+362    val_362
+364    val_364
+365    val_365
+366    val_366
+367    val_367
+367    val_367
+368    val_368
+369    val_369
+369    val_369
+369    val_369
+373    val_373
+374    val_374
+375    val_375
+377    val_377
+378    val_378
+379    val_379
+382    val_382
+382    val_382
+384    val_384
+384    val_384
+384    val_384
+386    val_386
+389    val_389
+392    val_392
+393    val_393
+394    val_394
+395    val_395
+395    val_395
+396    val_396
+396    val_396
+396    val_396
+397    val_397
+397    val_397
+399    val_399
+399    val_399
+400    val_400
+401    val_401
+401    val_401
+401    val_401
+401    val_401
+401    val_401
+402    val_402
+403    val_403
+403    val_403
+403    val_403
+404    val_404
+404    val_404
+406    val_406
+406    val_406
+406    val_406
+406    val_406
+407    val_407
+409    val_409
+409    val_409
+409    val_409
+411    val_411
+413    val_413
+413    val_413
+414    val_414
+414    val_414
+417    val_417
+417    val_417
+417    val_417
+418    val_418
+419    val_419
+421    val_421
+424    val_424
+424    val_424
+427    val_427
+429    val_429
+429    val_429
+430    val_430
+430    val_430
+430    val_430
+431    val_431
+431    val_431
+431    val_431
+432    val_432
+435    val_435
+436    val_436
+437    val_437
+438    val_438
+438    val_438
+438    val_438
+439    val_439
+439    val_439
+443    val_443
+444    val_444
+446    val_446
+448    val_448
+449    val_449
+452    val_452
+453    val_453
+454    val_454
+454    val_454
+454    val_454
+455    val_455
+457    val_457
+458    val_458
+458    val_458
+459    val_459
+459    val_459
+460    val_460
+462    val_462
+462    val_462
+463    val_463
+463    val_463
+466    val_466
+466    val_466
+466    val_466
+467    val_467
+468    val_468
+468    val_468
+468    val_468
+468    val_468
+469    val_469
+469    val_469
+469    val_469
+469    val_469
+469    val_469
+470    val_470
+472    val_472
+475    val_475
+477    val_477
+478    val_478
+478    val_478
+479    val_479
+480    val_480
+480    val_480
+480    val_480
+481    val_481
+482    val_482
+483    val_483
+484    val_484
+485    val_485
+487    val_487
+489    val_489
+489    val_489
+489    val_489
+489    val_489
+490    val_490
+491    val_491
+492    val_492
+492    val_492
+493    val_493
+494    val_494
+495    val_495
+496    val_496
+497    val_497
+498    val_498
+498    val_498
+498    val_498

