GitHub user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/16787#discussion_r100059580
  
    --- Diff: sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala ---
    @@ -463,117 +459,6 @@ private[spark] object HiveUtils extends Logging {
         case (other, tpe) if primitiveTypes contains tpe => other.toString
       }
     
    -  /** Converts the native StructField to Hive's FieldSchema. */
    -  private def toHiveColumn(c: StructField): FieldSchema = {
    -    val typeString = if (c.metadata.contains(HiveUtils.hiveTypeString)) {
    -      c.metadata.getString(HiveUtils.hiveTypeString)
    -    } else {
    -      c.dataType.catalogString
    -    }
    -    new FieldSchema(c.name, typeString, c.getComment.orNull)
    -  }
    -
    -  /** Builds the native StructField from Hive's FieldSchema. */
    -  private def fromHiveColumn(hc: FieldSchema): StructField = {
    -    val columnType = try {
    -      CatalystSqlParser.parseDataType(hc.getType)
    -    } catch {
    -      case e: ParseException =>
    -        throw new SparkException("Cannot recognize hive type string: " + hc.getType, e)
    -    }
    -
    -    val metadata = new MetadataBuilder().putString(HiveUtils.hiveTypeString, hc.getType).build()
    -    val field = StructField(
    -      name = hc.getName,
    -      dataType = columnType,
    -      nullable = true,
    -      metadata = metadata)
    -    Option(hc.getComment).map(field.withComment).getOrElse(field)
    -  }
    -
    -  // TODO: merge this with HiveClientImpl#toHiveTable
    -  /** Converts the native table metadata representation format CatalogTable to Hive's Table. */
    -  def toHiveTable(catalogTable: CatalogTable): HiveTable = {
    -    // We start by constructing an API table as Hive performs several important transformations
    -    // internally when converting an API table to a QL table.
    -    val tTable = new org.apache.hadoop.hive.metastore.api.Table()
    -    tTable.setTableName(catalogTable.identifier.table)
    -    tTable.setDbName(catalogTable.database)
    -
    -    val tableParameters = new java.util.HashMap[String, String]()
    -    tTable.setParameters(tableParameters)
    -    catalogTable.properties.foreach { case (k, v) => tableParameters.put(k, v) }
    -
    -    tTable.setTableType(catalogTable.tableType match {
    -      case CatalogTableType.EXTERNAL => HiveTableType.EXTERNAL_TABLE.toString
    -      case CatalogTableType.MANAGED => HiveTableType.MANAGED_TABLE.toString
    -      case CatalogTableType.VIEW => HiveTableType.VIRTUAL_VIEW.toString
    -    })
    -
    -    val sd = new org.apache.hadoop.hive.metastore.api.StorageDescriptor()
    --- End diff --
    
    Here we set the data location via `StorageDescriptor`. Is this approach safe across all Hive versions?
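    
    For context, a minimal sketch of the pattern being asked about (hypothetical `withLocation` helper; the removed code wires many more fields into the descriptor). `Table.setSd` and `StorageDescriptor.setLocation` are plain Thrift setters from the metastore API:
    
    ```scala
    import org.apache.hadoop.hive.metastore.api.{StorageDescriptor, Table => MetastoreTable}
    
    // Sketch only: attach a data location to the Thrift table via its
    // StorageDescriptor, bypassing Hive's own Table/Warehouse helpers.
    def withLocation(tTable: MetastoreTable, locationUri: Option[String]): MetastoreTable = {
      val sd = new StorageDescriptor()
      locationUri.foreach(sd.setLocation)
      tTable.setSd(sd)
      tTable
    }
    ```
    
    The concern is whether every Hive version Spark supports honors a location set directly on the descriptor like this, rather than one resolved through Hive's `Table` wrapper.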

