This is an automated email from the ASF dual-hosted git repository.

codope pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new fb4f026580 [HUDI-4970] Update kafka-connect readme and refactor HoodieConfig#create (#6857)
fb4f026580 is described below

commit fb4f02658050a74179338d4cfba07ceabe688c53
Author: Sagar Sumit <sagarsumi...@gmail.com>
AuthorDate: Thu Oct 6 08:11:35 2022 +0530

    [HUDI-4970] Update kafka-connect readme and refactor HoodieConfig#create (#6857)
---
 .../apache/hudi/cli/commands/TestUpgradeDowngradeCommand.java |  6 +++---
 .../org/apache/hudi/table/upgrade/TestUpgradeDowngrade.java   |  6 +++---
 .../main/java/org/apache/hudi/common/config/HoodieConfig.java |  9 +--------
 hudi-kafka-connect/README.md                                  | 11 +++++++----
 .../sql/hudi/procedure/TestUpgradeOrDowngradeProcedure.scala  |  5 +++--
 5 files changed, 17 insertions(+), 20 deletions(-)

diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestUpgradeDowngradeCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestUpgradeDowngradeCommand.java
index ed4c952824..ff983d44ae 100644
--- a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestUpgradeDowngradeCommand.java
+++ b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestUpgradeDowngradeCommand.java
@@ -164,9 +164,9 @@ public class TestUpgradeDowngradeCommand extends CLIFunctionalTestHarness {
     Path propertyFile = new Path(metaClient.getMetaPath() + "/" + HoodieTableConfig.HOODIE_PROPERTIES_FILE);
     // Load the properties and verify
     FSDataInputStream fsDataInputStream = metaClient.getFs().open(propertyFile);
-    HoodieConfig hoodieConfig = HoodieConfig.create(fsDataInputStream);
+    HoodieConfig config = new HoodieConfig();
+    config.getProps().load(fsDataInputStream);
     fsDataInputStream.close();
-    assertEquals(Integer.toString(expectedVersion.versionCode()), hoodieConfig
-        .getString(HoodieTableConfig.VERSION));
+    assertEquals(Integer.toString(expectedVersion.versionCode()), config.getString(HoodieTableConfig.VERSION));
   }
 }
diff --git a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/table/upgrade/TestUpgradeDowngrade.java b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/table/upgrade/TestUpgradeDowngrade.java
index 39dbacabac..64ee23c35e 100644
--- a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/table/upgrade/TestUpgradeDowngrade.java
+++ b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/table/upgrade/TestUpgradeDowngrade.java
@@ -770,9 +770,9 @@ public class TestUpgradeDowngrade extends HoodieClientTestBase {
     Path propertyFile = new Path(metaClient.getMetaPath() + "/" + HoodieTableConfig.HOODIE_PROPERTIES_FILE);
     // Load the properties and verify
     FSDataInputStream fsDataInputStream = metaClient.getFs().open(propertyFile);
-    HoodieConfig hoodieConfig = HoodieConfig.create(fsDataInputStream);
+    HoodieConfig config = new HoodieConfig();
+    config.getProps().load(fsDataInputStream);
     fsDataInputStream.close();
-    assertEquals(Integer.toString(expectedVersion.versionCode()), hoodieConfig
-        .getString(HoodieTableConfig.VERSION));
+    assertEquals(Integer.toString(expectedVersion.versionCode()), config.getString(HoodieTableConfig.VERSION));
   }
 }
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/config/HoodieConfig.java b/hudi-common/src/main/java/org/apache/hudi/common/config/HoodieConfig.java
index 366d19fe6e..91f0671cf9 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/config/HoodieConfig.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/config/HoodieConfig.java
@@ -18,15 +18,14 @@
 
 package org.apache.hudi.common.config;
 
-import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.common.util.ReflectionUtils;
 import org.apache.hudi.common.util.StringUtils;
 import org.apache.hudi.exception.HoodieException;
+
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 
-import java.io.IOException;
 import java.io.Serializable;
 import java.lang.reflect.Modifier;
 import java.util.Arrays;
@@ -42,12 +41,6 @@ public class HoodieConfig implements Serializable {
 
   protected static final String CONFIG_VALUES_DELIMITER = ",";
 
-  public static HoodieConfig create(FSDataInputStream inputStream) throws IOException {
-    HoodieConfig config = new HoodieConfig();
-    config.props.load(inputStream);
-    return config;
-  }
-
   protected TypedProperties props;
 
   public HoodieConfig() {
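
(For reference only, not part of the patch: a minimal sketch of the pattern callers can use now that the HoodieConfig#create(FSDataInputStream) helper is removed, as the updated tests above do. The class name below is hypothetical, and it assumes you already have a Hadoop FileSystem handle and the path to the hoodie.properties file.)

```java
import java.io.IOException;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import org.apache.hudi.common.config.HoodieConfig;

// Illustrative sketch: load hoodie.properties without the removed HoodieConfig#create helper.
public class HoodiePropertiesLoader {

  public static HoodieConfig load(FileSystem fs, Path propertyFile) throws IOException {
    HoodieConfig config = new HoodieConfig();
    try (FSDataInputStream in = fs.open(propertyFile)) {
      // getProps() returns the underlying TypedProperties, which can be populated straight from the stream.
      config.getProps().load(in);
    }
    return config;
  }
}
```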
diff --git a/hudi-kafka-connect/README.md b/hudi-kafka-connect/README.md
index 449236ea5c..a1d6f812c1 100644
--- a/hudi-kafka-connect/README.md
+++ b/hudi-kafka-connect/README.md
@@ -36,10 +36,10 @@ After installing these dependencies, follow steps based on your requirement.
 
 ### 1 - Starting the environment
 
-For runtime dependencies, we encourage using the confluent HDFS connector jars. We have tested our setup with
-version `10.1.0`. Either use confluent-hub to install the connector or download it
-from [here](https://tinyurl.com/yb472f79). You can install the confluent-hub command-line tool by downloading Confluent
-Platform from [here](https://tinyurl.com/s2jjby53).
+For runtime dependencies, we encourage using the confluent HDFS connector jars. We have tested our setup with version `10.1.0`
+(essentially, `hadoop-common` dependency version 2.10.1 is required which comes as part of confluent HDFS connector).
+Either use confluent-hub to install the connector or download it from [here](https://tinyurl.com/yb472f79).
+You can install the confluent-hub command-line tool by downloading Confluent Platform from [here](https://tinyurl.com/s2jjby53).
 
 Copy the entire folder to the classpath that will be used by the Hudi Kafka Connector.
 
@@ -145,6 +145,9 @@ successful running of the workers.
 cd $KAFKA_HOME
 ./bin/connect-distributed.sh $HUDI_DIR/hudi-kafka-connect/demo/connect-distributed.properties
 ```
+Ensure that the `plugin.path` property points to the location where all connect plugins are installed.
+For this doc, it is `/usr/local/share/kafka/plugins`. If your plugins are installed at a different location,
+then please change the above property in `$HUDI_DIR/hudi-kafka-connect/demo/connect-distributed.properties`.
 
 ### 6 - To add the Hudi Sink to the Connector (delete it if you want to re-configure)
 
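(Illustrative excerpt, not part of the patch: with the default location mentioned in the README change above, the worker config would carry a line like the one below; adjust the path if your plugins are installed elsewhere.)

```properties
# $HUDI_DIR/hudi-kafka-connect/demo/connect-distributed.properties
plugin.path=/usr/local/share/kafka/plugins
```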
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestUpgradeOrDowngradeProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestUpgradeOrDowngradeProcedure.scala
index 99ce78a8a1..a664a4a32c 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestUpgradeOrDowngradeProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestUpgradeOrDowngradeProcedure.scala
@@ -87,10 +87,11 @@ class TestUpgradeOrDowngradeProcedure extends HoodieSparkProcedureTestBase {
     val propertyFile = new Path(metaClient.getMetaPath + "/" + HoodieTableConfig.HOODIE_PROPERTIES_FILE)
     // Load the properties and verify
     val fsDataInputStream = metaClient.getFs.open(propertyFile)
-    val hoodieConfig = HoodieConfig.create(fsDataInputStream)
+    val config = new HoodieConfig
+    config.getProps.load(fsDataInputStream)
     fsDataInputStream.close()
     assertResult(Integer.toString(versionCode)) {
-      hoodieConfig.getString(HoodieTableConfig.VERSION)
+      config.getString(HoodieTableConfig.VERSION)
     }
   }
 }
