While executing drop table, invalidate the table in the hive metastore by running the command externally
Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/df3dea90 Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/df3dea90 Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/df3dea90 Branch: refs/heads/12-dev Commit: df3dea90b088e1de2725cbb5cef1c8f90209d9fe Parents: 69edde1 Author: Manohar <manohar.craz...@gmail.com> Authored: Wed Apr 5 20:58:48 2017 +0530 Committer: Manohar <manohar.craz...@gmail.com> Committed: Thu Apr 6 12:49:46 2017 +0530 ---------------------------------------------------------------------- .../carbondata/core/locks/HdfsFileLock.java | 28 ++++++++++++-------- .../spark/sql/hive/CarbonHiveMetadataUtil.scala | 3 ++- 2 files changed, 19 insertions(+), 12 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/df3dea90/core/src/main/java/org/apache/carbondata/core/locks/HdfsFileLock.java ---------------------------------------------------------------------- diff --git a/core/src/main/java/org/apache/carbondata/core/locks/HdfsFileLock.java b/core/src/main/java/org/apache/carbondata/core/locks/HdfsFileLock.java index b3d79e9..aed73b3 100644 --- a/core/src/main/java/org/apache/carbondata/core/locks/HdfsFileLock.java +++ b/core/src/main/java/org/apache/carbondata/core/locks/HdfsFileLock.java @@ -23,7 +23,6 @@ import java.io.IOException; import org.apache.carbondata.common.logging.LogService; import org.apache.carbondata.common.logging.LogServiceFactory; import org.apache.carbondata.core.constants.CarbonCommonConstants; -import org.apache.carbondata.core.datastore.filesystem.CarbonFile; import org.apache.carbondata.core.datastore.impl.FileFactory; import org.apache.carbondata.core.metadata.CarbonTableIdentifier; import org.apache.carbondata.core.util.CarbonProperties; @@ -107,19 +106,26 @@ public class HdfsFileLock extends 
AbstractCarbonLock { try { dataOutputStream.close(); } catch (IOException e) { + try { + if (!FileFactory.isFileExist(location, FileFactory.getFileType(location))) { + return true; + } + } catch (IOException e1) { + LOGGER.error("Exception in isFileExist of the lock file " + e1.getMessage()); + } + LOGGER.error("Exception in unlocking of the lock file " + e.getMessage()); return false; } finally { - CarbonFile carbonFile = - FileFactory.getCarbonFile(location, FileFactory.getFileType(location)); - if (carbonFile.exists()) { - if (carbonFile.delete()) { - LOGGER.info("Deleted the lock file " + location); - } else { - LOGGER.error("Not able to delete the lock file " + location); + try { + if (FileFactory.isFileExist(location, FileFactory.getFileType(location))) { + if (FileFactory.getCarbonFile(location, FileFactory.getFileType(location)).delete()) { + LOGGER.info("Deleted the lock file " + location); + } else { + LOGGER.error("Not able to delete the lock file " + location); + } } - } else { - LOGGER.error("Not able to delete the lock file because " - + "it is not existed in location " + location); + } catch (IOException e) { + LOGGER.error("Exception in isFileExist of the lock file " + e.getMessage()); } } } http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/df3dea90/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetadataUtil.scala ---------------------------------------------------------------------- diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetadataUtil.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetadataUtil.scala index fc5730b..657d7de 100644 --- a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetadataUtil.scala +++ b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetadataUtil.scala @@ -41,7 +41,8 @@ object CarbonHiveMetadataUtil { tableName: String, sparkSession: SparkSession): Unit = { try { - 
sparkSession.sql(s"DROP TABLE IF EXISTS $databaseName.$tableName") + sparkSession.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog].client. + runSqlHive(s"DROP TABLE IF EXISTS $databaseName.$tableName") } catch { case e: Exception => LOGGER.audit(