Github user gvramana commented on a diff in the pull request:

    https://github.com/apache/incubator-carbondata/pull/641#discussion_r106093450
    --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonMetastore.scala ---
    @@ -304,38 +341,76 @@ class CarbonMetastore(conf: RuntimeConfig, val storePath: String) {
         if (tableExists(tableName, Some(dbName))(sparkSession)) {
           sys.error(s"Table [$tableName] already exists under Database 
[$dbName]")
         }
    +    val schemaEvolutionEntry = new SchemaEvolutionEntry(tableInfo.getLastUpdatedTime)
         val schemaConverter = new ThriftWrapperSchemaConverterImpl
         val thriftTableInfo = schemaConverter
           .fromWrapperToExternalTableInfo(tableInfo, dbName, tableName)
    -    val schemaEvolutionEntry = new SchemaEvolutionEntry(tableInfo.getLastUpdatedTime)
         thriftTableInfo.getFact_table.getSchema_evolution.getSchema_evolution_history
           .add(schemaEvolutionEntry)
    +    val carbonTablePath = createSchemaThriftFile(tableInfo,
    +      thriftTableInfo,
    +      dbName,
    +      tableName)(sparkSession)
    +    updateSchemasUpdatedTime(touchSchemaFileSystemTime(dbName, tableName))
    +    LOGGER.info(s"Table $tableName for Database $dbName created successfully.")
    +    carbonTablePath
    +  }
     
    +  /**
    +   * This method will write the schema thrift file in carbon store and load table metadata
    +   *
    +   * @param tableInfo
    +   * @param thriftTableInfo
    +   * @param dbName
    +   * @param tableName
    +   * @param sparkSession
    +   * @return
    +   */
    +  private def createSchemaThriftFile(
    +      tableInfo: org.apache.carbondata.core.metadata.schema.table.TableInfo,
    +      thriftTableInfo: org.apache.carbondata.format.TableInfo,
    +      dbName: String, tableName: String)
    +    (sparkSession: SparkSession): String = {
         val carbonTableIdentifier = new CarbonTableIdentifier(dbName, tableName,
           tableInfo.getFactTable.getTableId)
         val carbonTablePath = CarbonStorePath.getCarbonTablePath(storePath, carbonTableIdentifier)
         val schemaFilePath = carbonTablePath.getSchemaFilePath
         val schemaMetadataPath = CarbonTablePath.getFolderContainingFile(schemaFilePath)
         tableInfo.setMetaDataFilepath(schemaMetadataPath)
         tableInfo.setStorePath(storePath)
    -    CarbonMetadata.getInstance().loadTableMetadata(tableInfo)
    -    val tableMeta = new TableMeta(carbonTableIdentifier, storePath,
    -      CarbonMetadata.getInstance().getCarbonTable(dbName + "_" + tableName))
    -
         val fileType = FileFactory.getFileType(schemaMetadataPath)
         if (!FileFactory.isFileExist(schemaMetadataPath, fileType)) {
           FileFactory.mkdirs(schemaMetadataPath, fileType)
         }
         val thriftWriter = new ThriftWriter(schemaFilePath, false)
    -    thriftWriter.open()
    +    thriftWriter.open(FileWriteOperation.OVERWRITE)
         thriftWriter.write(thriftTableInfo)
         thriftWriter.close()
    +    removeTableFromMetadata(dbName, tableName)
    --- End diff --
    
    Directly call the refresh flow here after updating the schema for the alter operation.
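    A minimal, hypothetical sketch of what the suggestion could look like (not the
    actual CarbonMetastore API): the alter path would go through one refresh routine
    that evicts the stale in-memory entry and reloads the table from the freshly
    written schema file, rather than stopping at removeTableFromMetadata. Apart from
    removeTableFromMetadata, which appears in the diff above, every name below is a
    stand-in assumed for illustration only.

    object RefreshAfterAlterSketch {

      // Stand-in for the in-memory table metadata cache held by the metastore.
      private val metadataCache = scala.collection.mutable.Map[String, String]()

      // Mirrors the call visible in the diff: drop the stale cached entry.
      private def removeTableFromMetadata(dbName: String, tableName: String): Unit =
        metadataCache.remove(s"${dbName}_$tableName")

      // Stand-in for reloading the table from the schema file in the store.
      private def loadTableMetadata(dbName: String, tableName: String, schemaFilePath: String): Unit =
        metadataCache(s"${dbName}_$tableName") = schemaFilePath

      // The suggested "refresh flow": one call that both evicts and reloads,
      // so callers of the alter path cannot forget the reload step.
      def refreshTable(dbName: String, tableName: String, schemaFilePath: String): Unit = {
        removeTableFromMetadata(dbName, tableName)
        loadTableMetadata(dbName, tableName, schemaFilePath)
      }

      def main(args: Array[String]): Unit = {
        // After the altered schema thrift file has been written:
        refreshTable("default", "t1", "/store/default/t1/Metadata/schema")
        println(metadataCache)
      }
    }

    The design point is simply that writing the schema and refreshing the cached
    metadata happen in one place, which is what calling the refresh flow directly
    asks for.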

