This is an automated email from the ASF dual-hosted git repository.

ming pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-hugegraph-toolchain.git


The following commit(s) were added to refs/heads/master by this push:
     new e9cd3368 feat(loader): support jdbc date type & sync .editorconfig (#648)
e9cd3368 is described below

commit e9cd33681824756a65059d19b526ea9a7d6d637a
Author: MuleiSY <[email protected]>
AuthorDate: Tue Jan 14 19:06:50 2025 +0800

    feat(loader): support jdbc date type & sync .editorconfig (#648)
    
    * feat(loader): add jdbc date type support, including DATE, TIME, DATETIME, TIMESTAMP and YEAR
    
    * feat(test): add test resource
    
    * refact(test): rollback DBUtil.java
    
    * refactor(style): Synchronized the main module's .editorconfig with toolchain
    
    * feat(loader): Restore functions rolled back due to force push and ensure date compatibility
    
    * fix(loader): Add casting-not-supported exception
    
    * fix some tiny problems
    
    * chore(loader): remove unnecessary if-else logic
    
    * fix(loader): fix hdfs type not support bug
    
    * fix(hubble-ci): specified python version to fix ModuleNotFoundError
    
    * chore(ci): unify all CI styles
    
    * fix(ci): test ci working
    
    * fix(hubble-ci): fix tiny problem
    
    * fix(hubble-ci): update ci's pip env
    
    * fix(hubble-ci): try remove cache
    
    * fix(hubble-ci): try remove install pip
    
    * fix(hubble-ci): fix python version problem
    
    * empty test for ci
    
    * Update codeql-analysis.yml
    
    * revert codeql
    
    * tiny improve
    
    * Update DataTypeUtil.java
    
    ---------
    
    Co-authored-by: imbajin <[email protected]>
---
 .editorconfig                                      |  75 +++++++-
 .github/outdated/release.yml                       |  10 +-
 .github/workflows/client-go-ci.yml                 |  11 +-
 .github/workflows/codeql-analysis.yml              |   8 +-
 .github/workflows/hubble-ci.yml                    |  35 ++--
 .github/workflows/license-checker.yml              |   3 +-
 .github/workflows/loader-ci.yml                    |  16 +-
 .github/workflows/spark-connector-ci.yml           |  14 +-
 .github/workflows/tools-ci.yml                     |  20 ++-
 .../hubble-dist/assembly/travis/requirements.txt   |   1 +
 .../apache/hugegraph/loader/util/DataTypeUtil.java | 193 +++++++++++----------
 .../loader/test/functional/JDBCLoadTest.java       | 104 +++++++++--
 .../resources/jdbc_sql_date_convert/schema.groovy  |  29 ++++
 .../resources/jdbc_sql_date_convert/struct.json    |  22 +++
 14 files changed, 388 insertions(+), 153 deletions(-)

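The heart of the loader change below is normalizing whatever temporal value a JDBC driver returns for DATE, TIME, DATETIME, TIMESTAMP, or YEAR columns into java.util.Date via epoch milliseconds. Here is a minimal standalone sketch of that conversion idea, assuming the same choices the diff makes (system default timezone, TIME anchored to the epoch day, YEAR to January 1st); the class and method names are illustrative, not the loader's API:

import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.Year;
import java.time.ZoneId;
import java.util.Date;

public class JdbcDateSketch {

    // Normalize a temporal value a JDBC driver may hand back to java.util.Date
    static Date toDate(Object value) {
        if (value instanceof Date) {
            // java.sql.Date/Time/Timestamp all extend java.util.Date
            return new Date(((Date) value).getTime());
        } else if (value instanceof LocalDateTime) {
            // Warn: uses the system default timezone, mirroring the diff below
            return Date.from(((LocalDateTime) value).atZone(ZoneId.systemDefault()).toInstant());
        } else if (value instanceof LocalDate) {
            return toDate(((LocalDate) value).atStartOfDay());
        } else if (value instanceof LocalTime) {
            // TIME has no date part, so anchor it at the epoch day (1970-01-01)
            return toDate(((LocalTime) value).atDate(LocalDate.of(1970, 1, 1)));
        } else if (value instanceof Year) {
            // YEAR maps to the first day of that year
            return toDate(((Year) value).atDay(1).atStartOfDay());
        }
        throw new IllegalArgumentException("Unsupported temporal type: " +
                                           value.getClass().getName());
    }

    public static void main(String[] args) {
        System.out.println(toDate(LocalDateTime.of(2017, 12, 10, 15, 30, 45)));
        System.out.println(toDate(Year.of(2017)));
    }
}
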
diff --git a/.editorconfig b/.editorconfig
index 5c479266..cf79877b 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -21,11 +21,80 @@ root = true
 charset = utf-8
 end_of_line = lf
 insert_final_newline = true
+max_line_length = 100
+ij_wrap_on_typing = true
+ij_visual_guides = 100
 
-[*.{java, xml, py}]
+
+[*.{java,xml,py}]
 indent_style = space
 indent_size = 4
 
-[*.{java, xml}]
-# Ignore the IDEA unsupported warning & it works well (indeed)
+[*.{java,xml}]
 continuation_indent_size = 8
+# IDEA needs its own ij_ setting in addition to the standard one
+ij_continuation_indent_size = 8
+
+[*.md]
+max_line_length = off
+
+[*.java]
+ij_java_doc_add_p_tag_on_empty_lines = true
+ij_java_doc_do_not_wrap_if_one_line = true
+ij_java_annotation_parameter_wrap = normal
+ij_java_align_multiline_annotation_parameters = true
+ij_java_class_count_to_use_import_on_demand = 100
+ij_java_names_count_to_use_import_on_demand = 100
+ij_java_imports_layout = $*, |, java.**, |, javax.**, |, org.**, |, com.**, |, *
+
+ij_java_line_comment_at_first_column = false
+ij_java_align_multiline_chained_methods = true
+ij_java_align_multiline_parameters_in_calls = true
+ij_java_align_multiline_binary_operation = true
+ij_java_align_multiline_assignment = true
+ij_java_align_multiline_ternary_operation = true
+ij_java_align_multiline_throws_list = true
+ij_java_align_multiline_extends_list = true
+ij_java_align_multiline_array_initializer_expression = true
+
+ij_java_call_parameters_wrap = normal
+ij_java_method_parameters_wrap = normal
+ij_java_resource_list_wrap = normal
+ij_java_extends_list_wrap = normal
+ij_java_throws_list_wrap = normal
+ij_java_method_call_chain_wrap = normal
+ij_java_binary_operation_wrap = normal
+ij_java_ternary_operation_wrap = normal
+ij_java_for_statement_wrap = normal
+ij_java_array_initializer_wrap = normal
+ij_java_assignment_wrap = normal
+ij_java_assert_statement_wrap = normal
+ij_java_if_brace_force = if_multiline
+ij_java_do_while_brace_force = always
+ij_java_while_brace_force = if_multiline
+ij_java_for_brace_force = if_multiline
+ij_java_wrap_long_lines = true
+ij_java_parameter_annotation_wrap = normal
+ij_java_enum_constants_wrap = split_into_lines
+
+ij_java_keep_blank_lines_in_declarations = 1
+ij_java_keep_blank_lines_in_code = 1
+ij_java_keep_blank_lines_between_package_declaration_and_header = 1
+ij_java_keep_blank_lines_before_right_brace = 1
+ij_java_blank_lines_around_class = 1
+ij_java_blank_lines_after_class_header = 1
+
+ij_smart_tabs = true
+
+[*.xml]
+max_line_length = 120
+ij_visual_guides = 120
+ij_xml_text_wrap = off
+ij_xml_space_inside_empty_tag = true
+
+[*.yaml]
+ij_yaml_spaces_within_braces = false
+ij_yaml_spaces_within_brackets = false
+
+[*.properties]
+ij_properties_keep_blank_lines = true
diff --git a/.github/outdated/release.yml b/.github/outdated/release.yml
index 2bec50e4..0d1c3b08 100644
--- a/.github/outdated/release.yml
+++ b/.github/outdated/release.yml
@@ -11,6 +11,11 @@ jobs:
     env:
       STATIC_DIR: assembly/static
     steps:
+      - name: Fetch Code
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 2
+
       - name: Install JDK 8
         uses: actions/setup-java@v3
         with:
@@ -24,11 +29,6 @@ jobs:
           key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
           restore-keys: ${{ runner.os }}-m2
 
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 2
-
       - name: Package
         run: |
           mvn clean package -DskipTests
diff --git a/.github/workflows/client-go-ci.yml b/.github/workflows/client-go-ci.yml
index be32f69f..04ee0c16 100644
--- a/.github/workflows/client-go-ci.yml
+++ b/.github/workflows/client-go-ci.yml
@@ -30,7 +30,13 @@ jobs:
       fail-fast: false
       matrix:
         JAVA_VERSION: ['11']
+
     steps:
+      - name: Fetch Code
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 2
+
       - name: Install JDK 11
         uses: actions/setup-java@v3
         with:
@@ -44,11 +50,6 @@ jobs:
           key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
           restore-keys: ${{ runner.os }}-m2
 
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 2
-
       - name: Use staged maven repo
         if: ${{ env.USE_STAGE == 'true' }}
         run: |
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 1003b9e2..c3a27051 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -51,7 +51,7 @@ jobs:
       run: |
         cp $HOME/.m2/settings.xml /tmp/settings.xml
         mv -vf .github/configs/settings.xml $HOME/.m2/settings.xml
-        
+
     - name: Use Node.js 16
       uses: actions/setup-node@v3
       with:
@@ -64,11 +64,11 @@ jobs:
        # If you wish to specify custom queries, you can do so here or in a config file.
        # By default, queries listed here will override any specified in a config file.
        # Prefix the list here with "+" to use these queries and those in the config file.
-        
+
        # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
         # queries: security-extended,security-and-quality
 
-        
+
     # Autobuild attempts to build any compiled languages  (C/C++, C#, or Java).
    # If this step fails, then you should remove it and run the build manually (see below)
     - if: matrix.language == 'python' || matrix.language == 'javascript'
@@ -83,7 +83,7 @@ jobs:
     # ℹ️ Command-line programs to run using the OS shell.
    # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
 
-    #   If the Autobuild fails above, remove it and uncomment the following three lines. 
+    #   If the Autobuild fails above, remove it and uncomment the following three lines.
     #   modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
 
     # - run: |
diff --git a/.github/workflows/hubble-ci.yml b/.github/workflows/hubble-ci.yml
index 2cb8c9fb..5a8820f0 100644
--- a/.github/workflows/hubble-ci.yml
+++ b/.github/workflows/hubble-ci.yml
@@ -33,12 +33,29 @@ jobs:
     env:
       USE_STAGE: 'true' # Whether to include the stage repository.
       STATIC_DIR: hugegraph-hubble/hubble-dist/assembly/static
+    strategy:
+      matrix:
+        JAVA_VERSION: ['11']
+        python-version: ["3.11"]
+
     steps:
+      - name: Fetch Code
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 2
+
       - name: Install JDK 11
         uses: actions/setup-java@v3
         with:
-          java-version: '11'
+          java-version: ${{ matrix.JAVA_VERSION }}
           distribution: 'adopt'
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+          cache: 'pip'
+
       # we also should cache python & yarn & downloads to avoid useless work
       - name: Cache Maven packages
         uses: actions/cache@v3
@@ -48,19 +65,6 @@ jobs:
           restore-keys: |
             ${{ runner.os }}-maven-
 
-      - name: Cache Python packages
-        uses: actions/cache@v3
-        with:
-          path: ~/.cache/pip
-          key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
-          restore-keys: |
-            ${{ runner.os }}-pip-
-
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 2
-
       - name: use staged maven repo settings
         if: ${{ env.USE_STAGE == 'true' }}
         run: |
@@ -75,7 +79,8 @@ jobs:
 
       - name: Prepare env and service
         run: |
-          sudo pip install -r ${TRAVIS_DIR}/requirements.txt
+          
+          python -m pip install -r ${TRAVIS_DIR}/requirements.txt
           cd hugegraph-hubble
           mvn package -Dmaven.test.skip=true
           cd apache-hugegraph-hubble-incubating*
diff --git a/.github/workflows/license-checker.yml b/.github/workflows/license-checker.yml
index 77a51fe6..ef51a265 100644
--- a/.github/workflows/license-checker.yml
+++ b/.github/workflows/license-checker.yml
@@ -30,7 +30,8 @@ jobs:
   check-license-header:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - name: Fetch Code
+        uses: actions/checkout@v4
       # More info could refer to: https://github.com/apache/skywalking-eyes
       - name: Check License Header
         uses: apache/skywalking-eyes@main
diff --git a/.github/workflows/loader-ci.yml b/.github/workflows/loader-ci.yml
index 21765627..058b2d38 100644
--- a/.github/workflows/loader-ci.yml
+++ b/.github/workflows/loader-ci.yml
@@ -32,11 +32,20 @@ jobs:
       DB_USER: root
       DB_PASS: root
       DB_DATABASE: load_test
+    strategy:
+      matrix:
+        JAVA_VERSION: ['11']
+
     steps:
+      - name: Fetch Code
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 2
+
       - name: Install JDK 11
         uses: actions/setup-java@v3
         with:
-          java-version: '11'
+          java-version: ${{ matrix.JAVA_VERSION }}
           distribution: 'adopt'
 
       - name: Cache Maven packages
@@ -46,11 +55,6 @@ jobs:
           key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
           restore-keys: ${{ runner.os }}-m2
 
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 2
-
       - name: use staged maven repo settings
         if: ${{ env.USE_STAGE == 'true' }}
         run: |
diff --git a/.github/workflows/spark-connector-ci.yml b/.github/workflows/spark-connector-ci.yml
index afcfb38b..0be6b098 100644
--- a/.github/workflows/spark-connector-ci.yml
+++ b/.github/workflows/spark-connector-ci.yml
@@ -27,7 +27,16 @@ jobs:
       TRAVIS_DIR: hugegraph-spark-connector/assembly/travis
       # hugegraph commit date: 2024-12-09
       COMMIT_ID: f838897
+    strategy:
+      matrix:
+        JAVA_VERSION: [ '11' ]
+      
     steps:
+      - name: Fetch Code
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 2
+
       - name: Install JDK 11
         uses: actions/setup-java@v4
         with:
@@ -41,11 +50,6 @@ jobs:
           key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
           restore-keys: ${{ runner.os }}-m2
 
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 2
-
       - name: use staged maven repo settings
         if: ${{ env.USE_STAGE == 'true' }}
         run: |
diff --git a/.github/workflows/tools-ci.yml b/.github/workflows/tools-ci.yml
index b6647919..b3ea9410 100644
--- a/.github/workflows/tools-ci.yml
+++ b/.github/workflows/tools-ci.yml
@@ -27,11 +27,20 @@ jobs:
       # TODO: could we use one param to unify it? or use a action template (could use one ci file)
       # TODO: replace it with the (latest - n) commit id (n >= 15)
       COMMIT_ID: 29ecc0
+    strategy:
+      matrix:
+        JAVA_VERSION: [ '11' ]
+
     steps:
+      - name: Fetch Code
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 2
+
       - name: Install JDK 11
         uses: actions/setup-java@v3
         with:
-          java-version: '11'
+          java-version: ${{ matrix.JAVA_VERSION }}
           distribution: 'adopt'
       - name: Cache Maven packages
         uses: actions/cache@v3
@@ -39,24 +48,25 @@ jobs:
           path: ~/.m2
           key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
           restore-keys: ${{ runner.os }}-m2
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 2
+
       - name: use staged maven repo settings
         if: ${{ env.USE_STAGE == 'true' }}
         run: |
           cp $HOME/.m2/settings.xml /tmp/settings.xml
           mv -vf .github/configs/settings.xml $HOME/.m2/settings.xml
+
       - name: Compile
         run: |
          mvn install -pl hugegraph-client,hugegraph-tools -am -Dmaven.javadoc.skip=true -DskipTests -ntp
+
       - name: Prepare env and service
         run: |
           $TRAVIS_DIR/install-hugegraph-from-source.sh $COMMIT_ID
+
       - name: Run test
         run: |
           mvn test -Dtest=FuncTestSuite -pl hugegraph-tools -ntp
+
       - name: Upload coverage to Codecov
         uses: codecov/codecov-action@v3
         with:
diff --git a/hugegraph-hubble/hubble-dist/assembly/travis/requirements.txt b/hugegraph-hubble/hubble-dist/assembly/travis/requirements.txt
index 15141dea..a6755cbd 100644
--- a/hugegraph-hubble/hubble-dist/assembly/travis/requirements.txt
+++ b/hugegraph-hubble/hubble-dist/assembly/travis/requirements.txt
@@ -3,3 +3,4 @@ pytest >= 3.0
 six >= 1.11
 behave >= 1.2.6
 assertpy >= 0.15
+requests
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/DataTypeUtil.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/DataTypeUtil.java
index 80a441bc..0689d4b7 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/DataTypeUtil.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/DataTypeUtil.java
@@ -17,6 +17,8 @@
 
 package org.apache.hugegraph.loader.util;
 
+import java.time.LocalDateTime;
+import java.time.ZoneId;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Date;
@@ -57,10 +59,8 @@ public final class DataTypeUtil {
         return ReflectionUtil.isSimpleType(value.getClass());
     }
 
-    public static Object convert(Object value, PropertyKey propertyKey,
-                                 InputSource source) {
-        E.checkArgumentNotNull(value,
-                               "The value to be converted can't be null");
+    public static Object convert(Object value, PropertyKey propertyKey, InputSource source) {
+        E.checkArgumentNotNull(value, "The value to be converted can't be null");
 
         String key = propertyKey.name();
         DataType dataType = propertyKey.dataType();
@@ -70,8 +70,7 @@ public final class DataTypeUtil {
                 return parseSingleValue(key, value, dataType, source);
             case SET:
             case LIST:
-                return parseMultiValues(key, value, dataType,
-                                        cardinality, source);
+                return parseMultiValues(key, value, dataType, cardinality, source);
             default:
                throw new AssertionError(String.format("Unsupported cardinality: '%s'",
                                                        cardinality));
@@ -79,10 +78,8 @@ public final class DataTypeUtil {
     }
 
     @SuppressWarnings("unchecked")
-    public static List<Object> splitField(String key, Object rawColumnValue,
-                                          InputSource source) {
-        E.checkArgument(rawColumnValue != null,
-                        "The value to be split can't be null");
+    public static List<Object> splitField(String key, Object rawColumnValue, InputSource source) {
+        E.checkArgument(rawColumnValue != null, "The value to be split can't be null");
         if (rawColumnValue instanceof Collection) {
             return (List<Object>) rawColumnValue;
         }
@@ -112,75 +109,104 @@ public final class DataTypeUtil {
                 return UUID.fromString(value);
             }
             // UUID represented by hex string
-            E.checkArgument(value.length() == 32,
-                            "Invalid UUID value(key='%s') '%s'", key, value);
+            E.checkArgument(value.length() == 32, "Invalid UUID value(key='%s') '%s'", key, value);
             String high = value.substring(0, 16);
             String low = value.substring(16);
-            return new UUID(Long.parseUnsignedLong(high, 16),
-                            Long.parseUnsignedLong(low, 16));
+            return new UUID(Long.parseUnsignedLong(high, 16), Long.parseUnsignedLong(low, 16));
         }
        throw new IllegalArgumentException(String.format("Failed to convert value(key='%s') " +
                                                         "'%s'(%s) to UUID", key, rawValue,
                                                          rawValue.getClass()));
     }
 
-    private static Object parseSingleValue(String key, Object rawValue,
-                                           DataType dataType,
+    private static Object parseSingleValue(String key, Object rawValue, DataType dataType,
                                            InputSource source) {
-        // Trim space if raw value is string
-        Object value = rawValue;
-        if (rawValue instanceof String) {
-            value = ((String) rawValue).trim();
-        }
+        Object value = trimString(rawValue);
         if (dataType.isNumber()) {
             return parseNumber(key, value, dataType);
-        } else if (dataType.isBoolean()) {
-            return parseBoolean(key, value);
-        } else if (dataType.isDate()) {
-            E.checkState(source instanceof FileSource,
-                         "Only accept FileSource when convert String value " +
-                         "to Date, but got '%s'", source.getClass().getName());
-            String dateFormat = ((FileSource) source).dateFormat();
-            String timeZone = ((FileSource) source).timeZone();
-
-            if (source instanceof KafkaSource) {
-                List<String> extraDateFormats =
-                        ((KafkaSource) source).getExtraDateFormats();
-                dateFormat = ((KafkaSource) source).getDateFormat();
-                timeZone = ((KafkaSource) source).getTimeZone();
-                if (extraDateFormats == null || extraDateFormats.isEmpty()) {
-                    return parseDate(key, value, dateFormat, timeZone);
-                } else {
-                    HashSet<String> allDateFormats = new HashSet<>();
-                    allDateFormats.add(dateFormat);
-                    allDateFormats.addAll(extraDateFormats);
-                    int size = allDateFormats.size();
-                    for (String df : allDateFormats) {
-                        try {
-                            return parseDate(key, value, df, timeZone);
-                        } catch (Exception e) {
-                            if (--size <= 0) {
-                                throw e;
-                            }
-                        }
-                    }
+        }
+
+        switch (dataType) {
+            case TEXT:
+                return value.toString();
+            case BOOLEAN:
+                return parseBoolean(key, value);
+            case DATE:
+                return parseDate(key, source, value);
+            case UUID:
+                return parseUUID(key, value);
+            default:
+                E.checkArgument(checkDataType(key, value, dataType),
+                                "The value(key='%s') '%s'(%s) is not match with data type %s and " +
+                                "can't convert to it", key, value, value.getClass(), dataType);
+        }
+        return value;
+    }
+
+    private static Object trimString(Object rawValue) {
+        if (rawValue instanceof String) {
+            return ((String) rawValue).trim();
+        }
+        return rawValue;
+    }
+
+    // TODO: could extract some steps to a method
+    private static Date parseDate(String key, InputSource source, Object value) {
+        List<String> extraDateFormats = null;
+        String dateFormat = null;
+        String timeZone = null;
+
+        switch (source.type()) {
+            case KAFKA:
+                KafkaSource kafkaSource = (KafkaSource) source;
+                extraDateFormats = kafkaSource.getExtraDateFormats();
+                dateFormat = kafkaSource.getDateFormat();
+                timeZone = kafkaSource.getTimeZone();
+                break;
+            case JDBC:
+                /*
+                 * Warn: this uses the system default timezone;
+                 * should we find a better way to support users in different timezones?
+                 */
+                long timestamp = 0L;
+                if (value instanceof Date) {
+                    timestamp = ((Date) value).getTime();
+                } else if (value instanceof LocalDateTime) {
+                    timestamp = ((LocalDateTime) 
value).atZone(ZoneId.systemDefault())
+                                                       .toInstant()
+                                                       .toEpochMilli();
                 }
-            }
+                value = new Date(timestamp);
+                break;
+            case HDFS:
+            case FILE:
+                FileSource fileSource = (FileSource) source;
+                dateFormat = fileSource.dateFormat();
+                timeZone = fileSource.timeZone();
+                break;
+            default:
+                throw new IllegalArgumentException("Date format source " +
+                                                   source.getClass().getName() + " not supported");
+        }
 
+        if (extraDateFormats == null || extraDateFormats.isEmpty()) {
             return parseDate(key, value, dateFormat, timeZone);
-        } else if (dataType.isUUID()) {
-            return parseUUID(key, value);
-        } else if (dataType.isText()) {
-            if (!(rawValue instanceof String)) {
-                value = rawValue.toString();
-            }
         }
-        E.checkArgument(checkDataType(key, value, dataType),
-                        "The value(key='%s') '%s'(%s) is not match with " +
-                        "data type %s and can't convert to it",
-                        key, value, value.getClass(), dataType);
 
-        return value;
+        Set<String> allDateFormats = new HashSet<>(extraDateFormats);
+        allDateFormats.add(dateFormat);
+
+        int size = allDateFormats.size();
+        for (String df : allDateFormats) {
+            try {
+                return parseDate(key, value, df, timeZone);
+            } catch (Exception e) {
+                if (--size <= 0) {
+                    throw e;
+                }
+            }
+        }
+        return parseDate(key, value, dateFormat, timeZone);
     }
 
     /**
@@ -188,10 +214,8 @@ public final class DataTypeUtil {
      * TODO: After parsing to json, the order of the collection changed
      * in some cases (such as list<date>)
      **/
-    private static Object parseMultiValues(String key, Object values,
-                                           DataType dataType,
-                                           Cardinality cardinality,
-                                           InputSource source) {
+    private static Object parseMultiValues(String key, Object values, DataType dataType,
+                                           Cardinality cardinality, InputSource source) {
         // JSON file should not parse again
         if (values instanceof Collection &&
             checkCollectionDataType(key, (Collection<?>) values, dataType)) {
@@ -204,14 +228,12 @@ public final class DataTypeUtil {
         String rawValue = (String) values;
         List<Object> valueColl = split(key, rawValue, source);
         Collection<Object> results = cardinality == Cardinality.LIST ?
-                                     InsertionOrderUtil.newList() :
-                                     InsertionOrderUtil.newSet();
+                                     InsertionOrderUtil.newList() : InsertionOrderUtil.newSet();
         valueColl.forEach(value -> {
             results.add(parseSingleValue(key, value, dataType, source));
         });
        E.checkArgument(checkCollectionDataType(key, results, dataType),
-                        "Not all collection elems %s match with data type %s",
-                        results, dataType);
+                        "Not all collection elems %s match with data type %s", results, dataType);
         return results;
     }
 
@@ -227,9 +249,9 @@ public final class DataTypeUtil {
                 return false;
             } else {
                 throw new IllegalArgumentException(String.format(
-                          "Failed to convert '%s'(key='%s') to Boolean, " +
-                          "the acceptable boolean strings are %s or %s",
-                          key, rawValue, ACCEPTABLE_TRUE, ACCEPTABLE_FALSE));
+                        "Failed to convert '%s'(key='%s') to Boolean, " +
+                        "the acceptable boolean strings are %s or %s",
+                        key, rawValue, ACCEPTABLE_TRUE, ACCEPTABLE_FALSE));
             }
         }
        throw new IllegalArgumentException(String.format("Failed to convert value(key='%s') " +
@@ -237,10 +259,8 @@ public final class DataTypeUtil {
                                                          rawValue.getClass()));
     }
 
-    private static Number parseNumber(String key, Object value,
-                                      DataType dataType) {
-        E.checkState(dataType.isNumber(),
-                     "The target data type must be number");
+    private static Number parseNumber(String key, Object value, DataType dataType) {
+        E.checkState(dataType.isNumber(), "The target data type must be number");
 
         if (dataType.clazz().isInstance(value)) {
             return (Number) value;
@@ -277,11 +297,11 @@ public final class DataTypeUtil {
         }
     }
 
-    private static Date parseDate(String key, Object value,
-                                  String dateFormat, String timeZone) {
+    private static Date parseDate(String key, Object value, String dateFormat, String timeZone) {
         if (value instanceof Date) {
             return (Date) value;
         }
+
         if (value instanceof Number) {
             return new Date(((Number) value).longValue());
         } else if (value instanceof String) {
@@ -302,8 +322,7 @@ public final class DataTypeUtil {
                                                          value.getClass()));
     }
 
-    private static List<Object> split(String key, String rawValue,
-                                      InputSource source) {
+    private static List<Object> split(String key, String rawValue, InputSource source) {
         List<Object> valueColl = new ArrayList<>();
         if (rawValue.isEmpty()) {
             return valueColl;
@@ -340,10 +359,9 @@ public final class DataTypeUtil {
     }
 
     /**
-     * Check type of the value valid
+     * Check whether the type of the value is valid
      */
-    private static boolean checkDataType(String key, Object value,
-                                         DataType dataType) {
+    private static boolean checkDataType(String key, Object value, DataType dataType) {
         if (value instanceof Number && dataType.isNumber()) {
             return parseNumber(key, value, dataType) != null;
         }
@@ -351,10 +369,9 @@ public final class DataTypeUtil {
     }
 
     /**
-     * Check type of all the values(maybe some list properties) valid
+     * Check whether the types of all the values (maybe some list properties) are valid
      */
-    private static boolean checkCollectionDataType(String key,
-                                                   Collection<?> values,
+    private static boolean checkCollectionDataType(String key, Collection<?> values,
                                                    DataType dataType) {
         for (Object value : values) {
             if (!checkDataType(key, value, dataType)) {
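
A second detail in the refactored parseDate above: when a Kafka source configures extra date formats, every pattern is tried and the last failure is rethrown only after all of them fail. Below is a minimal sketch of that fallback pattern, assuming a primary pattern plus extras; SimpleDateFormat stands in for the loader's own date utility, and the iteration order here is illustrative (the loader collects the patterns into an unordered HashSet):

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.LinkedHashSet;
import java.util.Set;

public class MultiFormatParseSketch {

    // Try each configured pattern; surface the last failure only if all fail
    static Date parseWithFallback(String value, String primary, Set<String> extras)
            throws ParseException {
        Set<String> formats = new LinkedHashSet<>(extras);
        formats.add(primary);
        int remaining = formats.size();
        ParseException last = null;
        for (String pattern : formats) {
            try {
                return new SimpleDateFormat(pattern).parse(value);
            } catch (ParseException e) {
                last = e;
                if (--remaining <= 0) {
                    throw e;
                }
            }
        }
        throw last; // unreachable: the loop always returns or throws
    }

    public static void main(String[] args) throws ParseException {
        Set<String> extras = new LinkedHashSet<>(Arrays.asList("yyyy/MM/dd", "dd-MM-yyyy"));
        System.out.println(parseWithFallback("2017/12/10", "yyyy-MM-dd HH:mm:ss", extras));
    }
}
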
diff --git a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/JDBCLoadTest.java b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/JDBCLoadTest.java
index 232596ee..3a082648 100644
--- a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/JDBCLoadTest.java
+++ b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/JDBCLoadTest.java
@@ -17,20 +17,23 @@
 
 package org.apache.hugegraph.loader.test.functional;
 
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.time.Year;
+import java.time.format.DateTimeFormatter;
 import java.util.List;
 
 import org.apache.hugegraph.loader.HugeGraphLoader;
+import org.apache.hugegraph.structure.graph.Edge;
+import org.apache.hugegraph.structure.graph.Vertex;
+import org.apache.hugegraph.testutil.Assert;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import org.apache.hugegraph.structure.graph.Edge;
-import org.apache.hugegraph.structure.graph.Vertex;
-
-import org.apache.hugegraph.testutil.Assert;
-
 /**
  * TODO: add more test cases
  */
@@ -51,9 +54,8 @@ public class JDBCLoadTest extends LoadTest {
         clearServerData();
 
         dbUtil.connect();
-        // create database
-        dbUtil.execute(String.format("CREATE DATABASE IF NOT EXISTS `%s`;",
-                                     DATABASE));
+        // create the database
+        dbUtil.execute(String.format("CREATE DATABASE IF NOT EXISTS `%s`;", DATABASE));
         // create tables
         dbUtil.connect(DATABASE);
         // vertex person
@@ -72,6 +74,17 @@ public class JDBCLoadTest extends LoadTest {
                        "`price` double(10,2) NOT NULL," +
                        "PRIMARY KEY (`id`)" +
                        ") ENGINE=InnoDB DEFAULT CHARSET=utf8;");
+        // vertex date
+        dbUtil.execute("CREATE TABLE IF NOT EXISTS `date_test` (" +
+                       "`id` int(10) unsigned NOT NULL," +
+                       "`calendar_date` DATE NOT NULL," +
+                       "`calendar_datetime` DATETIME NOT NULL," +
+                       "`calendar_timestamp` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP," +
+                       "`calendar_time` TIME NOT NULL," +
+                       "`calendar_year` YEAR NOT NULL," +
+                       "PRIMARY KEY (`id`)" +
+                       ") ENGINE=InnoDB DEFAULT CHARSET=utf8;");
+
         // edge knows
         dbUtil.execute("CREATE TABLE IF NOT EXISTS `knows` (" +
                        "`id` int(10) unsigned NOT NULL," +
@@ -90,6 +103,7 @@ public class JDBCLoadTest extends LoadTest {
                        "`weight` double(10,2) NOT NULL," +
                        "PRIMARY KEY (`id`)" +
                        ") ENGINE=InnoDB DEFAULT CHARSET=utf8;");
+
     }
 
     @AfterClass
@@ -99,6 +113,7 @@ public class JDBCLoadTest extends LoadTest {
         dbUtil.execute("DROP TABLE IF EXISTS `software`");
         dbUtil.execute("DROP TABLE IF EXISTS `knows`");
         dbUtil.execute("DROP TABLE IF EXISTS `created`");
+        dbUtil.execute("DROP TABLE IF EXISTS `date_test`");
         // drop database
         dbUtil.execute(String.format("DROP DATABASE `%s`", DATABASE));
 
@@ -117,6 +132,7 @@ public class JDBCLoadTest extends LoadTest {
         dbUtil.execute("TRUNCATE TABLE `software`");
         dbUtil.execute("TRUNCATE TABLE `knows`");
         dbUtil.execute("TRUNCATE TABLE `created`");
+        dbUtil.execute("TRUNCATE TABLE `date_test`");
     }
 
     @Test
@@ -215,16 +231,16 @@ public class JDBCLoadTest extends LoadTest {
     @Test
     public void testNumberToStringInJDBCSource() {
         dbUtil.insert("INSERT INTO `person` VALUES " +
-                "(1,'marko',29,'Beijing')," +
-                "(2,'vadas',27,'HongKong')," +
-                "(3,'josh',32,'Beijing')," +
-                "(4,'peter',35,'Shanghai')," +
-                "(5,'li,nary',26,'Wu,han')," +
-                "(6,'tom',NULL,NULL);");
+                      "(1,'marko',29,'Beijing')," +
+                      "(2,'vadas',27,'HongKong')," +
+                      "(3,'josh',32,'Beijing')," +
+                      "(4,'peter',35,'Shanghai')," +
+                      "(5,'li,nary',26,'Wu,han')," +
+                      "(6,'tom',NULL,NULL);");
 
         dbUtil.insert("INSERT INTO `software` VALUES " +
-                "(100,'lop','java',328.08)," +
-                "(200,'ripple','java',199.67);");
+                      "(100,'lop','java',328.08)," +
+                      "(200,'ripple','java',199.67);");
 
         String[] args = new String[]{
                 "-f", configPath("jdbc_number_to_string/struct.json"),
@@ -243,4 +259,60 @@ public class JDBCLoadTest extends LoadTest {
         assertContains(vertices, "person", "age", "29");
         assertContains(vertices, "software", "price", "199.67");
     }
+
+    @Test
+    public void testJdbcSqlDateConvert() {
+        dbUtil.execute("INSERT INTO `date_test` VALUES " +
+                       "(1, '2017-12-10', '2017-12-10 15:30:45', '2017-12-10 15:30:45', " +
+                       "'15:30:45', '2017')," +
+                       "(2, '2009-11-11', '2009-11-11 08:15:30', '2009-11-11 08:15:30', " +
+                       "'08:15:30', '2009')," +
+                       "(3, '2017-03-24', '2017-03-24 12:00:00', '2017-03-24 12:00:00', " +
+                       "'12:00:00', '2017');");
+
+        String[] args = new String[]{
+                "-f", configPath("jdbc_sql_date_convert/struct.json"),
+                "-s", configPath("jdbc_sql_date_convert/schema.groovy"),
+                "-g", GRAPH,
+                "-h", SERVER,
+                "-p", String.valueOf(PORT),
+                "--batch-insert-threads", "2",
+                "--test-mode", "true"
+        };
+        HugeGraphLoader.main(args);
+
+        List<Vertex> vertices = CLIENT.graph().listVertices();
+
+        Assert.assertEquals(3, vertices.size());
+        // Define formatters
+        DateTimeFormatter serverDateFormatter =
+                DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS");
+
+        // DATE check
+        DateTimeFormatter dateFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd");
+        LocalDateTime date = LocalDate.parse("2017-12-10", dateFormatter).atStartOfDay();
+
+        DateTimeFormatter datetimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
+        LocalDateTime datetime = LocalDateTime.parse("2017-12-10 15:30:45", datetimeFormatter);
+
+        DateTimeFormatter timestampFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
+        LocalDateTime timestamp = LocalDateTime.parse("2017-12-10 15:30:45", timestampFormatter);
+
+        DateTimeFormatter timeFormatter = DateTimeFormatter.ofPattern("HH:mm:ss");
+        LocalTime time = LocalTime.parse("15:30:45", timeFormatter);
+        // Fill in the date part with the epoch start (1970-01-01)
+        LocalDateTime timeWithDate = time.atDate(LocalDate.of(1970, 1, 1));
+
+        DateTimeFormatter yearFormatter = DateTimeFormatter.ofPattern("yyyy");
+        Year year = Year.parse("2017", yearFormatter);
+        // Fill in the date part with the first day of the year
+        LocalDateTime yearStart = year.atDay(1).atStartOfDay();
+
+        assertContains(vertices, "date_test",
+                       "calendar_date", date.format(serverDateFormatter),
+                       "calendar_datetime", datetime.format(serverDateFormatter),
+                       "calendar_timestamp", timestamp.format(serverDateFormatter),
+                       "calendar_time", timeWithDate.format(serverDateFormatter),
+                       "calendar_year", yearStart.format(serverDateFormatter));
+    }
 }
diff --git a/hugegraph-loader/src/test/resources/jdbc_sql_date_convert/schema.groovy b/hugegraph-loader/src/test/resources/jdbc_sql_date_convert/schema.groovy
new file mode 100644
index 00000000..92691dbe
--- /dev/null
+++ b/hugegraph-loader/src/test/resources/jdbc_sql_date_convert/schema.groovy
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */
+// Define schema
+schema.propertyKey("calendar_date").asDate().ifNotExist().create()
+schema.propertyKey("calendar_datetime").asDate().ifNotExist().create()
+schema.propertyKey("calendar_timestamp").asDate().ifNotExist().create()
+schema.propertyKey("calendar_time").asDate().ifNotExist().create()
+schema.propertyKey("calendar_year").asDate().ifNotExist().create()
+
+schema.vertexLabel("date_test")
+        .useCustomizeNumberId()
+        .properties("calendar_date", "calendar_datetime", "calendar_timestamp", "calendar_time", "calendar_year")
+        .nullableKeys("calendar_date", "calendar_datetime", "calendar_timestamp", "calendar_time", "calendar_year")
+        .ifNotExist()
+        .create()
diff --git a/hugegraph-loader/src/test/resources/jdbc_sql_date_convert/struct.json b/hugegraph-loader/src/test/resources/jdbc_sql_date_convert/struct.json
new file mode 100644
index 00000000..4fbdf6a9
--- /dev/null
+++ b/hugegraph-loader/src/test/resources/jdbc_sql_date_convert/struct.json
@@ -0,0 +1,22 @@
+{
+  "vertices": [
+    {
+      "label": "date_test",
+      "input": {
+        "type": "jdbc",
+        "vendor": "mysql",
+        "driver": "com.mysql.cj.jdbc.Driver",
+        "url": "jdbc:mysql://127.0.0.1:3306",
+        "database": "load_test",
+        "table": "date_test",
+        "username": "root",
+        "password": "root",
+        "batch_size": 2
+      },
+      "id": "id",
+      "null_values": [
+        "NULL"
+      ]
+    }
+  ]
+}
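
To see which Java types actually reach DataTypeUtil for each date_test column, a hypothetical probe (not part of this commit) can dump what the driver returns; typical Connector/J builds hand back java.sql.Date/Time/Timestamp, with LocalDateTime possible under some driver versions and flags. The connection settings below mirror struct.json:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.Statement;

public class DateTestProbe {

    public static void main(String[] args) throws Exception {
        // Connection settings mirror struct.json above (assumes a local MySQL)
        String url = "jdbc:mysql://127.0.0.1:3306/load_test";
        try (Connection conn = DriverManager.getConnection(url, "root", "root");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT * FROM date_test LIMIT 1")) {
            ResultSetMetaData meta = rs.getMetaData();
            while (rs.next()) {
                for (int i = 1; i <= meta.getColumnCount(); i++) {
                    Object value = rs.getObject(i);
                    // Typically: DATE -> java.sql.Date, TIME -> java.sql.Time,
                    // DATETIME/TIMESTAMP -> java.sql.Timestamp, YEAR -> java.sql.Date,
                    // depending on the driver version and connection flags
                    System.out.printf("%s -> %s%n", meta.getColumnName(i),
                                      value == null ? "null" : value.getClass().getName());
                }
            }
        }
    }
}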

