This is an automated email from the ASF dual-hosted git repository.

leesf pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 36be287  [MINOR] Fix typo,'Kakfa' corrected to 'Kafka' & 'parquest' corrected to 'parquet' (#3717)
36be287 is described below

commit 36be28712196ff4427c41b0aa885c7fcd7356d7f
Author: 董可伦 <dongkelu...@inspur.com>
AuthorDate: Sun Sep 26 21:53:39 2021 +0800

    [MINOR] Fix typo,'Kakfa' corrected to 'Kafka' & 'parquest' corrected to 'parquet' (#3717)
---
 .../src/main/java/org/apache/hudi/common/config/ConfigGroups.java     | 2 +-
 .../java/org/apache/hudi/connect/writers/KafkaConnectConfigs.java     | 2 +-
 .../org/apache/hudi/utilities/functional/TestHoodieDeltaStreamer.java | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/hudi-common/src/main/java/org/apache/hudi/common/config/ConfigGroups.java b/hudi-common/src/main/java/org/apache/hudi/common/config/ConfigGroups.java
index 18b7de2..08e1bb4 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/config/ConfigGroups.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/config/ConfigGroups.java
@@ -74,7 +74,7 @@ public class ConfigGroups {
             + "Hudi stats and metrics.";
         break;
       case KAFKA_CONNECT:
-        description = "These set of configs are used for Kakfa Connect Sink Connector for writing Hudi Tables";
+        description = "These set of configs are used for Kafka Connect Sink Connector for writing Hudi Tables";
         break;
       default:
         description = "Please fill in the description for Config Group Name: " 
+ names.name;
diff --git a/hudi-kafka-connect/src/main/java/org/apache/hudi/connect/writers/KafkaConnectConfigs.java b/hudi-kafka-connect/src/main/java/org/apache/hudi/connect/writers/KafkaConnectConfigs.java
index e5662bd..c264c3c 100644
--- a/hudi-kafka-connect/src/main/java/org/apache/hudi/connect/writers/KafkaConnectConfigs.java
+++ b/hudi-kafka-connect/src/main/java/org/apache/hudi/connect/writers/KafkaConnectConfigs.java
@@ -36,7 +36,7 @@ import java.util.Properties;
 @Immutable
 @ConfigClassProperty(name = "Kafka Sink Connect Configurations",
     groupName = ConfigGroups.Names.KAFKA_CONNECT,
-    description = "Configurations for Kakfa Connect Sink Connector for Hudi.")
+    description = "Configurations for Kafka Connect Sink Connector for Hudi.")
 public class KafkaConnectConfigs extends HoodieConfig {
 
   public static final String KAFKA_VALUE_CONVERTER = "value.converter";
diff --git a/hudi-utilities/src/test/java/org/apache/hudi/utilities/functional/TestHoodieDeltaStreamer.java b/hudi-utilities/src/test/java/org/apache/hudi/utilities/functional/TestHoodieDeltaStreamer.java
index 4796ff0..fe1a151 100644
--- a/hudi-utilities/src/test/java/org/apache/hudi/utilities/functional/TestHoodieDeltaStreamer.java
+++ b/hudi-utilities/src/test/java/org/apache/hudi/utilities/functional/TestHoodieDeltaStreamer.java
@@ -1514,8 +1514,8 @@ public class TestHoodieDeltaStreamer extends TestHoodieDeltaStreamerBase {
 
     prepareParquetDFSSource(true, false, "source_uber.avsc", "target_uber.avsc", PROPS_FILENAME_TEST_PARQUET,
         PARQUET_SOURCE_ROOT, false);
-    // delta streamer w/ parquest source
-    String tableBasePath = dfsBasePath + "/test_dfs_to_kakfa" + testNum;
+    // delta streamer w/ parquet source
+    String tableBasePath = dfsBasePath + "/test_dfs_to_kafka" + testNum;
     HoodieDeltaStreamer deltaStreamer = new HoodieDeltaStreamer(
        TestHelpers.makeConfig(tableBasePath, WriteOperationType.INSERT, ParquetDFSSource.class.getName(),
             Collections.EMPTY_LIST, PROPS_FILENAME_TEST_PARQUET, false,
