Github user yhuai commented on a diff in the pull request:

    https://github.com/apache/spark/pull/16296#discussion_r94699866
  
    --- Diff: sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala ---
    @@ -18,14 +18,79 @@
     package org.apache.spark.sql.hive
     
     import org.apache.spark.sql._
    +import org.apache.spark.sql.catalyst.catalog.CatalogStorageFormat
     import org.apache.spark.sql.catalyst.expressions._
     import org.apache.spark.sql.catalyst.planning._
     import org.apache.spark.sql.catalyst.plans._
     import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
    +import org.apache.spark.sql.catalyst.rules.Rule
     import org.apache.spark.sql.execution._
    -import org.apache.spark.sql.execution.command.ExecutedCommandExec
    +import org.apache.spark.sql.execution.command.{DDLUtils, ExecutedCommandExec}
     import org.apache.spark.sql.execution.datasources.CreateTable
     import org.apache.spark.sql.hive.execution._
    +import org.apache.spark.sql.internal.{HiveSerDe, SQLConf}
    +
    +
    +/**
    + * Determine the serde/format of the Hive serde table, according to the storage properties.
    + */
    +class DetermineHiveSerde(conf: SQLConf) extends Rule[LogicalPlan] {
    +  override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
    +    case c @ CreateTable(t, _, _) if DDLUtils.isHiveTable(t) && t.storage.inputFormat.isEmpty =>
    +      if (t.bucketSpec.nonEmpty) {
    +        throw new AnalysisException("Cannot create bucketed Hive serde table.")
    +      }
    +
    +      val defaultStorage: CatalogStorageFormat = {
    +        val defaultStorageType = conf.getConfString("hive.default.fileformat", "textfile")
    +        val defaultHiveSerde = HiveSerDe.sourceToSerDe(defaultStorageType)
    +        CatalogStorageFormat(
    +          locationUri = None,
    +          inputFormat = defaultHiveSerde.flatMap(_.inputFormat)
    +            .orElse(Some("org.apache.hadoop.mapred.TextInputFormat")),
    +          outputFormat = defaultHiveSerde.flatMap(_.outputFormat)
    +            .orElse(Some("org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat")),
    +          serde = defaultHiveSerde.flatMap(_.serde)
    +            .orElse(Some("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe")),
    +          compressed = false,
    +          properties = Map())
    +      }
    +
    +      val options = new HiveOptions(t.storage.properties)
    +
    +      val fileStorage = if (options.format.isDefined) {
    +        HiveSerDe.sourceToSerDe(options.format.get) match {
    +          case Some(s) =>
    +            CatalogStorageFormat.empty.copy(
    +              inputFormat = s.inputFormat,
    +              outputFormat = s.outputFormat,
    +              serde = s.serde)
    +          case None =>
    +            throw new IllegalArgumentException(s"invalid format: '${options.format.get}'")
    +        }
    +      } else if (options.inputFormat.isDefined) {
    --- End diff --
    
    Maybe we should use a helper function to know whether inputFormat and outputFormat are set? The current version assumes that the reader knows the internals of HiveOptions.
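    
    For illustration only, a rough sketch of what such a helper could look like
    (the simplified class shape and the method name hasInputOutputFormat are
    assumptions for this sketch, not the actual HiveOptions internals):
    
        // Hypothetical, simplified HiveOptions: the helper hides which internal
        // fields encode the input/output format pair.
        class HiveOptions(parameters: Map[String, String]) {
          val format: Option[String] = parameters.get("fileFormat")
          val inputFormat: Option[String] = parameters.get("inputFormat")
          val outputFormat: Option[String] = parameters.get("outputFormat")
    
          // Callers ask this instead of inspecting the individual fields.
          def hasInputOutputFormat: Boolean =
            inputFormat.isDefined && outputFormat.isDefined
        }
    
    The branch above could then read "} else if (options.hasInputOutputFormat) {",
    so readers of HiveStrategies.scala would not need to know how HiveOptions
    stores these settings internally.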


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to