Repository: carbondata
Updated Branches:
  refs/heads/master 6029b2800 -> 137245057


[HOTFIX] Support TableProperties Map API for SDK

Currently the SDK supports load options as a map input, but table properties are
not accepted as a map. So this PR adds an API that takes the already supported
table properties as a map.

This makes configuration easier for end users of the SDK. Also, if new table
properties are added later, no separate API is needed to support them.
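
For reference, a minimal usage sketch of the new API. The schema, field names and
output path below are illustrative assumptions, not part of this patch:

```
// Hypothetical end-to-end sketch of the new withTableProperties API.
// Schema, field names and output path are assumptions for illustration only.
import java.util.HashMap;
import java.util.Map;

import org.apache.carbondata.sdk.file.CarbonWriter;
import org.apache.carbondata.sdk.file.Field;
import org.apache.carbondata.sdk.file.Schema;

public class TablePropertiesMapExample {
  public static void main(String[] args) throws Exception {
    // Table properties passed as a single map instead of separate builder calls
    Map<String, String> tableProperties = new HashMap<>();
    tableProperties.put("blocksize", "12");                 // MB, valid range 1-2048
    tableProperties.put("sortcolumns", "name");             // comma separated columns
    tableProperties.put("enableLocalDictionary", "true");
    tableProperties.put("localDictionaryThreshold", "200");

    // Assumed two-column schema for the example
    Field[] fields = new Field[2];
    fields[0] = new Field("name", "string");
    fields[1] = new Field("age", "int");

    CarbonWriter writer = CarbonWriter.builder()
        .outputPath("/tmp/carbon_sdk_output")               // assumed output location
        .isTransactionalTable(false)
        .withTableProperties(tableProperties)
        .buildWriterForCSVInput(new Schema(fields));

    writer.write(new String[]{"robot", "24"});
    writer.close();
  }
}
```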

This closes #2651


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/13724505
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/13724505
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/13724505

Branch: refs/heads/master
Commit: 13724505759a9b471ce78c1196bfa5337516e6e1
Parents: 6029b28
Author: ajantha-bhat <ajanthab...@gmail.com>
Authored: Thu Aug 23 15:39:29 2018 +0530
Committer: manishgupta88 <tomanishgupt...@gmail.com>
Committed: Fri Aug 24 18:29:55 2018 +0530

----------------------------------------------------------------------
 docs/sdk-guide.md                               | 18 +++++++
 .../TestNonTransactionalCarbonTable.scala       | 10 ++--
 .../sdk/file/CarbonWriterBuilder.java           | 52 +++++++++++++++++++-
 3 files changed, 76 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/13724505/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index e592aa5..8120efa 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -351,6 +351,24 @@ public CarbonWriterBuilder withLoadOptions(Map<String, String> options);
 
 ```
 /**
+ * To support the table properties for sdk writer
+ *
+ * @param options key,value pair of create table properties.
+ * supported keys values are
+ * a. blocksize -- [1-2048] values in MB. Default value is 1024
+ * b. blockletsize -- values in MB. Default value is 64 MB
+ * c. localDictionaryThreshold -- positive value, default is 10000
+ * d. enableLocalDictionary -- true / false. Default is false
+ * e. sortcolumns -- comma separated column. "c1,c2". Default all dimensions are sorted.
+ *
+ * @return updated CarbonWriterBuilder
+ */
+public CarbonWriterBuilder withTableProperties(Map<String, String> options);
+```
+
+
+```
+/**
 * Build a {@link CarbonWriter}, which accepts row in CSV format object
 * @param schema carbon Schema object {org.apache.carbondata.sdk.file.Schema}
 * @return CSVCarbonWriter

http://git-wip-us.apache.org/repos/asf/carbondata/blob/13724505/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
index 39f6ddc..b08a8dd 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
@@ -33,8 +33,8 @@ import org.apache.avro.file.DataFileWriter
 import org.apache.avro.generic.{GenericDatumReader, GenericDatumWriter, GenericRecord}
 import org.apache.avro.io.{DecoderFactory, Encoder}
 import org.apache.commons.io.FileUtils
-import org.apache.spark.sql.{CarbonEnv, Row}
 import org.apache.spark.sql.test.util.QueryTest
+import org.apache.spark.sql.{CarbonEnv, Row}
 import org.junit.Assert
 import org.scalatest.BeforeAndAfterAll
 
@@ -2386,9 +2386,13 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
 
   test("test LocalDictionary with custom Threshold") {
     FileUtils.deleteDirectory(new File(writerPath))
+    val tablePropertiesMap: util.Map[String, String] =
+      Map("blocksize" -> "12",
+        "sortcolumns" -> "name",
+        "localDictionaryThreshold" -> "200",
+        "enableLocalDictionary" -> "true").asJava
     val builder = CarbonWriter.builder.isTransactionalTable(false)
-      .sortBy(Array[String]("name")).withBlockSize(12).enableLocalDictionary(true)
-      .localDictionaryThreshold(200)
+      .withTableProperties(tablePropertiesMap)
       .uniqueIdentifier(System.currentTimeMillis).taskNo(System.nanoTime).outputPath(writerPath)
     generateCarbonData(builder)
     assert(FileFactory.getCarbonFile(writerPath).exists())

http://git-wip-us.apache.org/repos/asf/carbondata/blob/13724505/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
index 225d373..58edbee 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
@@ -273,6 +273,56 @@ public class CarbonWriterBuilder {
   }
 
   /**
+   * To support the table properties for sdk writer
+   *
+   * @param options key,value pair of create table properties.
+   * supported keys values are
+   * a. blocksize -- [1-2048] values in MB. Default value is 1024
+   * b. blockletsize -- values in MB. Default value is 64 MB
+   * c. localDictionaryThreshold -- positive value, default is 10000
+   * d. enableLocalDictionary -- true / false. Default is false
+   * e. sortcolumns -- comma separated column. "c1,c2". Default all dimensions are sorted.
+   *
+   * @return updated CarbonWriterBuilder
+   */
+  public CarbonWriterBuilder withTableProperties(Map<String, String> options) {
+    Objects.requireNonNull(options, "Table properties should not be null");
+    //validate the options.
+    if (options.size() > 5) {
+      throw new IllegalArgumentException("Supports only 5 options now. "
+          + "Refer method header or documentation");
+    }
+
+    Set<String> supportedOptions = new HashSet<>(Arrays
+        .asList("blocksize", "blockletsize", "localdictionarythreshold", "enablelocaldictionary",
+            "sortcolumns"));
+
+    for (String key : options.keySet()) {
+      if (!supportedOptions.contains(key.toLowerCase())) {
+        throw new IllegalArgumentException(
+            "Unsupported options. " + "Refer method header or documentation");
+      }
+    }
+
+    for (Map.Entry<String, String> entry : options.entrySet()) {
+      if (entry.getKey().equalsIgnoreCase("blocksize")) {
+        this.withBlockSize(Integer.parseInt(entry.getValue()));
+      } else if (entry.getKey().equalsIgnoreCase("blockletsize")) {
+        this.withBlockletSize(Integer.parseInt(entry.getValue()));
+      } else if (entry.getKey().equalsIgnoreCase("localDictionaryThreshold")) {
+        this.localDictionaryThreshold(Integer.parseInt(entry.getValue()));
+      } else if (entry.getKey().equalsIgnoreCase("enableLocalDictionary")) {
+        this.enableLocalDictionary((entry.getValue().equalsIgnoreCase("true")));
+      } else {
+        //sort columns
+        String[] sortColumns = entry.getValue().split(",");
+        this.sortBy(sortColumns);
+      }
+    }
+    return this;
+  }
+
+  /**
    * To set the carbondata file size in MB between 1MB-2048MB
    * @param blockSize is size in MB between 1MB to 2048 MB
    * default value is 1024 MB
@@ -293,7 +343,7 @@ public class CarbonWriterBuilder {
   public CarbonWriterBuilder localDictionaryThreshold(int localDictionaryThreshold) {
     if (localDictionaryThreshold <= 0) {
       throw new IllegalArgumentException(
-          "Local Dictionary Threshold should be between greater than 0");
+          "Local Dictionary Threshold should be greater than 0");
     }
     this.localDictionaryThreshold = localDictionaryThreshold;
     return this;
