Github user wangyum commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20866#discussion_r175858111
  
    --- Diff: sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala ---
    @@ -908,11 +912,39 @@ private[hive] object HiveClientImpl {
         Utils.classForName(name)
           .asInstanceOf[Class[_ <: org.apache.hadoop.hive.ql.io.HiveOutputFormat[_, _]]]
     
    +  private def toHiveMetaApiTable(table: CatalogTable): HiveMetaApiTable = {
    +    val sd = new StorageDescriptor
    +    sd.setSerdeInfo(new SerDeInfo)
    +    sd.setNumBuckets(-1)
    +    sd.setBucketCols(new JArrayList[String])
    +    sd.setCols(new JArrayList[FieldSchema])
    +    sd.setParameters(new JHashMap[String, String])
    +    sd.setSortCols(new JArrayList[Order])
    +    sd.getSerdeInfo.setParameters(new JHashMap[String, String])
    +    sd.getSerdeInfo.getParameters.put(serdeConstants.SERIALIZATION_FORMAT, "1")
    +    sd.setInputFormat(classOf[SequenceFileInputFormat[_, _]].getName)
    +    sd.setOutputFormat(classOf[HiveSequenceFileOutputFormat[_, _]].getName)
    +    val skewInfo: SkewedInfo = new SkewedInfo
    +    skewInfo.setSkewedColNames(new JArrayList[String])
    +    skewInfo.setSkewedColValues(new JArrayList[JList[String]])
    +    skewInfo.setSkewedColValueLocationMaps(new JHashMap[JList[String], String])
    +    sd.setSkewedInfo(skewInfo)
    +
    +    val apiTable = new HiveMetaApiTable()
    +    apiTable.setSd(sd)
    +    apiTable.setPartitionKeys(new JArrayList[FieldSchema])
    +    apiTable.setParameters(new JHashMap[String, String])
    +    apiTable.setTableType(HiveTableType.MANAGED_TABLE.toString)
    +    apiTable.setDbName(table.database)
    +    apiTable.setTableName(table.identifier.table)
    +    apiTable
    +  }
    +
       /**
        * Converts the native table metadata representation format CatalogTable to Hive's Table.
        */
       def toHiveTable(table: CatalogTable, userName: Option[String] = None): HiveTable = {
    -    val hiveTable = new HiveTable(table.database, table.identifier.table)
    +    val hiveTable = new HiveTable(toHiveMetaApiTable(table))
    --- End diff --
    
    Avoid [`t.setOwner(SessionState.getUserFromAuthenticator())`](https://github.com/apache/hive/blob/rel/release-2.3.2/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java#L180), because it connects to the [Metastore](https://github.com/apache/hive/blob/rel/release-2.3.2/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java#L913); we set the owner explicitly later instead: https://github.com/apache/spark/blob/v2.3.0/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala#L914
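
    A minimal sketch of the construction path this enables, assuming the `toHiveMetaApiTable` helper above and Hive 2.3.x's `Table(org.apache.hadoop.hive.metastore.api.Table)` constructor (`buildHiveTable` is only an illustrative name, not code from this PR):

    ```scala
    import org.apache.hadoop.hive.ql.metadata.{Table => HiveTable}
    import org.apache.spark.sql.catalyst.catalog.CatalogTable

    // Wrapping a pre-built metastore api.Table skips Hive's Table(db, tbl)
    // constructor, whose initialization calls
    // setOwner(SessionState.getUserFromAuthenticator()) and can open a
    // Metastore connection as a side effect.
    def buildHiveTable(table: CatalogTable, userName: Option[String]): HiveTable = {
      val hiveTable = new HiveTable(toHiveMetaApiTable(table))
      // The owner is applied explicitly from the caller-supplied userName instead.
      userName.foreach(hiveTable.setOwner)
      hiveTable
    }
    ```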

