[carbondata] 05/08: [CARBONDATA-3836] Fix metadata folder FileNotFoundException while creating new carbon table

2020-06-01 Thread kunalkapoor
This is an automated email from the ASF dual-hosted git repository.

kunalkapoor pushed a commit to branch branch-2.0
in repository https://gitbox.apache.org/repos/asf/carbondata.git

commit be50362f14511e014a45b2eff7ecffd579a3b877
Author: Manhua 
AuthorDate: Sat May 30 10:47:10 2020 +0800

[CARBONDATA-3836] Fix metadata folder FileNotFoundException while creating 
new carbon table

Why is this PR needed?
1. In the case of using carbon with only carbon.storelocation set, carbon 
will use the local Spark warehouse path instead of the configured value.
2. A FileNotFoundException is thrown when creating the schema file for a brand 
new table, because the current implementation gets the schema file path by listing 
the Metadata directory, which has not yet been created.

What changes were proposed in this PR?
1. spark.sql.warehouse.dir has its own default value in Spark; stop using 
carbonStorePath as the default value, which makes 
hiveStorePath.equals(carbonStorePath) TRUE when the user has not set 
spark.sql.warehouse.dir.
2. create the Metadata directory before getting the schema file path.

Does this PR introduce any user interface change?
No

Is any new testcase added?
No

This closes #3780
---
 integration/spark/src/main/scala/org/apache/spark/sql/CarbonEnv.scala | 2 +-
 .../main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala| 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git 
a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonEnv.scala 
b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
index 9df5809..5062a43 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
@@ -328,7 +328,7 @@ object CarbonEnv {
 if ((!EnvHelper.isLegacy(sparkSession)) &&
 (dbName.equals("default") || databaseLocation.endsWith(".db"))) {
   val carbonStorePath = CarbonProperties.getStorePath()
-  val hiveStorePath = sparkSession.conf.get("spark.sql.warehouse.dir", 
carbonStorePath)
+  val hiveStorePath = sparkSession.conf.get("spark.sql.warehouse.dir")
   // if carbon.store does not point to spark.sql.warehouse.dir then follow 
the old table path
   // format
   if (carbonStorePath != null && !hiveStorePath.equals(carbonStorePath)) {
diff --git 
a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
 
b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
index 5156798..4c5f16d 100644
--- 
a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
+++ 
b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
@@ -402,8 +402,7 @@ class CarbonFileMetastore extends CarbonMetaStore {
   private def createSchemaThriftFile(
   identifier: AbsoluteTableIdentifier,
   thriftTableInfo: TableInfo): String = {
-val schemaFilePath = 
CarbonTablePath.getSchemaFilePath(identifier.getTablePath)
-val schemaMetadataPath = 
CarbonTablePath.getFolderContainingFile(schemaFilePath)
+val schemaMetadataPath = 
CarbonTablePath.getMetadataPath(identifier.getTablePath)
 if (!FileFactory.isFileExist(schemaMetadataPath)) {
   val isDirCreated = FileFactory
 .mkdirs(schemaMetadataPath, 
SparkSession.getActiveSession.get.sessionState.newHadoopConf())
@@ -411,6 +410,7 @@ class CarbonFileMetastore extends CarbonMetaStore {
 throw new IOException(s"Failed to create the metadata directory 
$schemaMetadataPath")
   }
 }
+val schemaFilePath = 
CarbonTablePath.getSchemaFilePath(identifier.getTablePath)
 val thriftWriter = new ThriftWriter(schemaFilePath, false)
 thriftWriter.open(FileWriteOperation.OVERWRITE)
 thriftWriter.write(thriftTableInfo)



[carbondata] 05/08: [CARBONDATA-3836] Fix metadata folder FileNotFoundException while creating new carbon table

2020-06-01 Thread kunalkapoor
This is an automated email from the ASF dual-hosted git repository.

kunalkapoor pushed a commit to annotated tag apache-carbondata-2.0.1-rc1
in repository https://gitbox.apache.org/repos/asf/carbondata.git

commit be50362f14511e014a45b2eff7ecffd579a3b877
Author: Manhua 
AuthorDate: Sat May 30 10:47:10 2020 +0800

[CARBONDATA-3836] Fix metadata folder FileNotFoundException while creating 
new carbon table

Why is this PR needed?
1. In the case of using carbon with only carbon.storelocation set, carbon 
will use the local Spark warehouse path instead of the configured value.
2. A FileNotFoundException is thrown when creating the schema file for a brand 
new table, because the current implementation gets the schema file path by listing 
the Metadata directory, which has not yet been created.

What changes were proposed in this PR?
1. spark.sql.warehouse.dir has its own default value in Spark; stop using 
carbonStorePath as the default value, which makes 
hiveStorePath.equals(carbonStorePath) TRUE when the user has not set 
spark.sql.warehouse.dir.
2. create the Metadata directory before getting the schema file path.

Does this PR introduce any user interface change?
No

Is any new testcase added?
No

This closes #3780
---
 integration/spark/src/main/scala/org/apache/spark/sql/CarbonEnv.scala | 2 +-
 .../main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala| 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git 
a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonEnv.scala 
b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
index 9df5809..5062a43 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
@@ -328,7 +328,7 @@ object CarbonEnv {
 if ((!EnvHelper.isLegacy(sparkSession)) &&
 (dbName.equals("default") || databaseLocation.endsWith(".db"))) {
   val carbonStorePath = CarbonProperties.getStorePath()
-  val hiveStorePath = sparkSession.conf.get("spark.sql.warehouse.dir", 
carbonStorePath)
+  val hiveStorePath = sparkSession.conf.get("spark.sql.warehouse.dir")
   // if carbon.store does not point to spark.sql.warehouse.dir then follow 
the old table path
   // format
   if (carbonStorePath != null && !hiveStorePath.equals(carbonStorePath)) {
diff --git 
a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
 
b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
index 5156798..4c5f16d 100644
--- 
a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
+++ 
b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
@@ -402,8 +402,7 @@ class CarbonFileMetastore extends CarbonMetaStore {
   private def createSchemaThriftFile(
   identifier: AbsoluteTableIdentifier,
   thriftTableInfo: TableInfo): String = {
-val schemaFilePath = 
CarbonTablePath.getSchemaFilePath(identifier.getTablePath)
-val schemaMetadataPath = 
CarbonTablePath.getFolderContainingFile(schemaFilePath)
+val schemaMetadataPath = 
CarbonTablePath.getMetadataPath(identifier.getTablePath)
 if (!FileFactory.isFileExist(schemaMetadataPath)) {
   val isDirCreated = FileFactory
 .mkdirs(schemaMetadataPath, 
SparkSession.getActiveSession.get.sessionState.newHadoopConf())
@@ -411,6 +410,7 @@ class CarbonFileMetastore extends CarbonMetaStore {
 throw new IOException(s"Failed to create the metadata directory 
$schemaMetadataPath")
   }
 }
+val schemaFilePath = 
CarbonTablePath.getSchemaFilePath(identifier.getTablePath)
 val thriftWriter = new ThriftWriter(schemaFilePath, false)
 thriftWriter.open(FileWriteOperation.OVERWRITE)
 thriftWriter.write(thriftTableInfo)