GitHub user gatorsmile commented on a diff in the pull request:

    https://github.com/apache/spark/pull/16626#discussion_r101961067
  
    --- Diff: sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala ---
    @@ -563,35 +574,47 @@ private[spark] class HiveExternalCatalog(conf: SparkConf, hadoopConf: Configurat
             //       want to alter the table location to a file path, we will fail. This should be fixed
             //       in the future.
     
    -        val newLocation = tableDefinition.storage.locationUri
    -        val storageWithPathOption = tableDefinition.storage.copy(
    -          properties = tableDefinition.storage.properties ++ newLocation.map("path" -> _))
    +        val newLocation = newTableDefinition.storage.locationUri
    +        val storageWithPathOption = newTableDefinition.storage.copy(
    +          properties = newTableDefinition.storage.properties ++ newLocation.map("path" -> _))
     
    -        val oldLocation = getLocationFromStorageProps(oldTableDef)
    +        val oldLocation = getLocationFromStorageProps(oldRawTableDef)
             if (oldLocation == newLocation) {
    -          storageWithPathOption.copy(locationUri = oldTableDef.storage.locationUri)
    +          storageWithPathOption.copy(locationUri = oldRawTableDef.storage.locationUri)
             } else {
               storageWithPathOption
             }
           }
     
    -      val partitionProviderProp = if (tableDefinition.tracksPartitionsInCatalog) {
    +      val partitionProviderProp = if (newTableDefinition.tracksPartitionsInCatalog) {
             TABLE_PARTITION_PROVIDER -> TABLE_PARTITION_PROVIDER_CATALOG
           } else {
             TABLE_PARTITION_PROVIDER -> TABLE_PARTITION_PROVIDER_FILESYSTEM
           }
     
    -      // Sets the `schema`, `partitionColumnNames` and `bucketSpec` from the old table definition,
    +      // Sets the `partitionColumnNames` and `bucketSpec` from the old table definition,
           // to retain the spark specific format if it is. Also add old data source properties to table
           // properties, to retain the data source table format.
    -      val oldDataSourceProps = oldTableDef.properties.filter(_._1.startsWith(DATASOURCE_PREFIX))
    -      val newTableProps = oldDataSourceProps ++ withStatsProps.properties + partitionProviderProp
    -      val newDef = withStatsProps.copy(
    +      val dataSourceProps = if (schemaChange) {
    +        val props =
    +          tableMetaToTableProps(newTableDefinition).filter(_._1.startsWith(DATASOURCE_PREFIX))
    +        if (newTableDefinition.provider.isDefined
    +          && newTableDefinition.provider.get.toLowerCase != DDLUtils.HIVE_PROVIDER) {
    +          // we only need to populate non-hive provider to the tableprops
    +          props.put(DATASOURCE_PROVIDER, newTableDefinition.provider.get)
    +        }
    +        props
    +      } else {
    +        oldRawTableDef.properties.filter(_._1.startsWith(DATASOURCE_PREFIX))
    +      }
    +      val newTableProps =
    +        dataSourceProps ++ maybeWithStatsPropsTable.properties + partitionProviderProp
    --- End diff --
    
    Let's create a new helper function for generating the table properties; `alterTable` is now 100+ lines long. Something like the sketch below could work.
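    A minimal sketch of what that helper could look like, assuming it stays inside `HiveExternalCatalog` so that `tableMetaToTableProps` and the `DATASOURCE_*` / `TABLE_PARTITION_PROVIDER_*` constants remain in scope; the name `generateTableProps` and its parameter list are illustrative, not part of this PR:
    
    ```scala
    // Illustrative helper, not part of the PR: gathers the table properties
    // that alterTable currently assembles inline.
    private def generateTableProps(
        oldRawTableDef: CatalogTable,
        newTableDefinition: CatalogTable,
        statsProps: Map[String, String],
        schemaChange: Boolean): Map[String, String] = {
      // On a schema change, regenerate the data source properties from the
      // new definition; otherwise carry the old ones over unchanged.
      val dataSourceProps: Map[String, String] = if (schemaChange) {
        val props = tableMetaToTableProps(newTableDefinition)
          .filter { case (k, _) => k.startsWith(DATASOURCE_PREFIX) }
        // we only need to populate a non-hive provider into the table props
        newTableDefinition.provider
          .filter(_.toLowerCase != DDLUtils.HIVE_PROVIDER)
          .foreach(p => props.put(DATASOURCE_PROVIDER, p))
        props.toMap
      } else {
        oldRawTableDef.properties.filter { case (k, _) => k.startsWith(DATASOURCE_PREFIX) }
      }
    
      val partitionProviderProp = if (newTableDefinition.tracksPartitionsInCatalog) {
        TABLE_PARTITION_PROVIDER -> TABLE_PARTITION_PROVIDER_CATALOG
      } else {
        TABLE_PARTITION_PROVIDER -> TABLE_PARTITION_PROVIDER_FILESYSTEM
      }
    
      dataSourceProps ++ statsProps + partitionProviderProp
    }
    ```
    
    `alterTable` would then shrink to a single call at this point, e.g. `val newTableProps = generateTableProps(oldRawTableDef, newTableDefinition, maybeWithStatsPropsTable.properties, schemaChange)`.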

