This is an automated email from the ASF dual-hosted git repository.

dataroaring pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new 16f36aacab9 branch-3.0: [enhance](regression-test) fix hudi incremental query bug and add hudi p2 cases #44003 (#44457)
16f36aacab9 is described below

commit 16f36aacab9256ab1b6ef0db044cad7ed9263253
Author: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
AuthorDate: Fri Nov 29 19:35:46 2024 +0800

    branch-3.0: [enhance](regression-test) fix hudi incremental query bug and add hudi p2 cases #44003 (#44457)
    
    Cherry-picked from #44003
    
    Co-authored-by: Socrates <suyit...@selectdb.com>
---
 .../hudi/source/COWIncrementalRelation.java        |  10 +-
 regression-test/conf/regression-conf.groovy        |   3 +
 .../hudi/test_hudi_incremental.out                 | 349 +++++++++++++++++++++
 .../hudi/test_hudi_schema_evolution.out            |  33 ++
 .../external_table_p2/hudi/test_hudi_snapshot.out  | Bin 0 -> 348526 bytes
 .../external_table_p2/hudi/test_hudi_timestamp.out |   6 +
 .../hudi/test_hudi_timetravel.out                  | 236 +++++++-------
 .../hudi/test_hudi_catalog.groovy                  |  39 +++
 .../hudi/test_hudi_incremental.groovy              | 111 +++++++
 .../hudi/test_hudi_schema_evolution.groovy         |  48 +++
 .../hudi/test_hudi_snapshot.groovy                 |  91 ++++++
 .../hudi/test_hudi_timestamp.groovy                |  62 ++++
 .../hudi/test_hudi_timetravel.groovy               | 138 ++++----
 13 files changed, 929 insertions(+), 197 deletions(-)

diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hudi/source/COWIncrementalRelation.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hudi/source/COWIncrementalRelation.java
index 5e76996bb12..7981a0b4f26 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hudi/source/COWIncrementalRelation.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hudi/source/COWIncrementalRelation.java
@@ -79,7 +79,7 @@ public class COWIncrementalRelation implements IncrementalRelation {
         if (!metaClient.getTableConfig().populateMetaFields()) {
             throw new HoodieException("Incremental queries are not supported when meta fields are disabled");
         }
-        HoodieInstant lastInstant = commitTimeline.lastInstant().get();
+
         String startInstantTime = optParams.get("hoodie.datasource.read.begin.instanttime");
         if (startInstantTime == null) {
             throw new HoodieException("Specify the begin instant time to pull from using "
@@ -89,16 +89,18 @@ public class COWIncrementalRelation implements IncrementalRelation {
             startInstantTime = "000";
         }
         String endInstantTime = optParams.getOrDefault("hoodie.datasource.read.end.instanttime",
-                lastInstant.getTimestamp());
+                hollowCommitHandling == HollowCommitHandling.USE_TRANSITION_TIME
+                        ? commitTimeline.lastInstant().get().getStateTransitionTime()
+                        : commitTimeline.lastInstant().get().getTimestamp());
         startInstantArchived = commitTimeline.isBeforeTimelineStarts(startInstantTime);
         endInstantArchived = commitTimeline.isBeforeTimelineStarts(endInstantTime);
 
         HoodieTimeline commitsTimelineToReturn;
         if (hollowCommitHandling == HollowCommitHandling.USE_TRANSITION_TIME) {
             commitsTimelineToReturn = commitTimeline.findInstantsInRangeByStateTransitionTime(startInstantTime,
-                    lastInstant.getStateTransitionTime());
+                    endInstantTime);
         } else {
-            commitsTimelineToReturn = commitTimeline.findInstantsInRange(startInstantTime, lastInstant.getTimestamp());
+            commitsTimelineToReturn = commitTimeline.findInstantsInRange(startInstantTime, endInstantTime);
         }
         List<HoodieInstant> commitsToReturn = commitsTimelineToReturn.getInstants();
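
Note on the hunk above: before this patch the default end instant was always lastInstant.getTimestamp(), even when hollow commits are handled via USE_TRANSITION_TIME, so the range lookup could mix commit timestamps with state-transition times. A minimal Groovy sketch of the corrected defaulting (names mirror the Java above; illustrative, not the patched code itself):

    // Pick the default end instant from the same time domain that the
    // subsequent findInstantsInRange* lookup will use.
    def resolveEndInstant = { optParams, commitTimeline, hollowCommitHandling ->
        def last = commitTimeline.lastInstant().get()
        optParams.getOrDefault("hoodie.datasource.read.end.instanttime",
                hollowCommitHandling == HollowCommitHandling.USE_TRANSITION_TIME
                        ? last.getStateTransitionTime()
                        : last.getTimestamp())
    }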
 
diff --git a/regression-test/conf/regression-conf.groovy b/regression-test/conf/regression-conf.groovy
index ab8151e49e9..a6c5929b4ac 100644
--- a/regression-test/conf/regression-conf.groovy
+++ b/regression-test/conf/regression-conf.groovy
@@ -205,6 +205,9 @@ extEsPort = 9200
 extEsUser = "*******"
 extEsPassword = "***********"
 
+enableExternalHudiTest = false
+hudiEmrCatalog = "***********"
+
 enableObjStorageTest=false
 enableMaxComputeTest=false
 aliYunAk="***********"
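
Both new keys are read by the suites below through context.config.otherConfigs. A hypothetical local setup (the hudiEmrCatalog value is environment-specific; the property keys inside it are illustrative assumptions, not part of this patch):

    // regression-conf.groovy -- hypothetical values for running the Hudi p2 suites
    enableExternalHudiTest = true
    hudiEmrCatalog = "'type' = 'hms', 'hive.metastore.uris' = 'thrift://<metastore-host>:9083'"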
diff --git a/regression-test/data/external_table_p2/hudi/test_hudi_incremental.out b/regression-test/data/external_table_p2/hudi/test_hudi_incremental.out
new file mode 100644
index 00000000000..b1bdad85013
--- /dev/null
+++ b/regression-test/data/external_table_p2/hudi/test_hudi_incremental.out
@@ -0,0 +1,349 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !incremental_1_end --
+9000
+
+-- !incremental_earliest_1 --
+1000
+
+-- !incremental_2_end --
+8000
+
+-- !incremental_earliest_2 --
+2000
+
+-- !incremental_1_2 --
+1000
+
+-- !incremental_3_end --
+7000
+
+-- !incremental_earliest_3 --
+3000
+
+-- !incremental_2_3 --
+1000
+
+-- !incremental_4_end --
+6000
+
+-- !incremental_earliest_4 --
+4000
+
+-- !incremental_3_4 --
+1000
+
+-- !incremental_5_end --
+5000
+
+-- !incremental_earliest_5 --
+5000
+
+-- !incremental_4_5 --
+1000
+
+-- !incremental_6_end --
+4000
+
+-- !incremental_earliest_6 --
+6000
+
+-- !incremental_5_6 --
+1000
+
+-- !incremental_7_end --
+3000
+
+-- !incremental_earliest_7 --
+7000
+
+-- !incremental_6_7 --
+1000
+
+-- !incremental_8_end --
+2000
+
+-- !incremental_earliest_8 --
+8000
+
+-- !incremental_7_8 --
+1000
+
+-- !incremental_9_end --
+1000
+
+-- !incremental_earliest_9 --
+9000
+
+-- !incremental_8_9 --
+1000
+
+-- !incremental_10_end --
+0
+
+-- !incremental_earliest_10 --
+10000
+
+-- !incremental_9_10 --
+1000
+
+-- !incremental_1_end --
+9000
+
+-- !incremental_earliest_1 --
+1000
+
+-- !incremental_2_end --
+8000
+
+-- !incremental_earliest_2 --
+2000
+
+-- !incremental_1_2 --
+1000
+
+-- !incremental_3_end --
+7000
+
+-- !incremental_earliest_3 --
+3000
+
+-- !incremental_2_3 --
+1000
+
+-- !incremental_4_end --
+6000
+
+-- !incremental_earliest_4 --
+4000
+
+-- !incremental_3_4 --
+1000
+
+-- !incremental_5_end --
+5000
+
+-- !incremental_earliest_5 --
+5000
+
+-- !incremental_4_5 --
+1000
+
+-- !incremental_6_end --
+4000
+
+-- !incremental_earliest_6 --
+6000
+
+-- !incremental_5_6 --
+1000
+
+-- !incremental_7_end --
+3000
+
+-- !incremental_earliest_7 --
+7000
+
+-- !incremental_6_7 --
+1000
+
+-- !incremental_8_end --
+2000
+
+-- !incremental_earliest_8 --
+8000
+
+-- !incremental_7_8 --
+1000
+
+-- !incremental_9_end --
+1000
+
+-- !incremental_earliest_9 --
+9000
+
+-- !incremental_8_9 --
+1000
+
+-- !incremental_10_end --
+0
+
+-- !incremental_earliest_10 --
+10000
+
+-- !incremental_9_10 --
+1000
+
+-- !incremental_1_end --
+9000
+
+-- !incremental_earliest_1 --
+1000
+
+-- !incremental_2_end --
+8000
+
+-- !incremental_earliest_2 --
+2000
+
+-- !incremental_1_2 --
+1000
+
+-- !incremental_3_end --
+7000
+
+-- !incremental_earliest_3 --
+3000
+
+-- !incremental_2_3 --
+1000
+
+-- !incremental_4_end --
+6000
+
+-- !incremental_earliest_4 --
+4000
+
+-- !incremental_3_4 --
+1000
+
+-- !incremental_5_end --
+5000
+
+-- !incremental_earliest_5 --
+5000
+
+-- !incremental_4_5 --
+1000
+
+-- !incremental_6_end --
+4000
+
+-- !incremental_earliest_6 --
+6000
+
+-- !incremental_5_6 --
+1000
+
+-- !incremental_7_end --
+3000
+
+-- !incremental_earliest_7 --
+7000
+
+-- !incremental_6_7 --
+1000
+
+-- !incremental_8_end --
+2000
+
+-- !incremental_earliest_8 --
+8000
+
+-- !incremental_7_8 --
+1000
+
+-- !incremental_9_end --
+1000
+
+-- !incremental_earliest_9 --
+9000
+
+-- !incremental_8_9 --
+1000
+
+-- !incremental_10_end --
+0
+
+-- !incremental_earliest_10 --
+10000
+
+-- !incremental_9_10 --
+1000
+
+-- !incremental_1_end --
+9000
+
+-- !incremental_earliest_1 --
+1000
+
+-- !incremental_2_end --
+8000
+
+-- !incremental_earliest_2 --
+2000
+
+-- !incremental_1_2 --
+1000
+
+-- !incremental_3_end --
+7000
+
+-- !incremental_earliest_3 --
+3000
+
+-- !incremental_2_3 --
+1000
+
+-- !incremental_4_end --
+6000
+
+-- !incremental_earliest_4 --
+4000
+
+-- !incremental_3_4 --
+1000
+
+-- !incremental_5_end --
+5000
+
+-- !incremental_earliest_5 --
+5000
+
+-- !incremental_4_5 --
+1000
+
+-- !incremental_6_end --
+4000
+
+-- !incremental_earliest_6 --
+6000
+
+-- !incremental_5_6 --
+1000
+
+-- !incremental_7_end --
+3000
+
+-- !incremental_earliest_7 --
+7000
+
+-- !incremental_6_7 --
+1000
+
+-- !incremental_8_end --
+2000
+
+-- !incremental_earliest_8 --
+8000
+
+-- !incremental_7_8 --
+1000
+
+-- !incremental_9_end --
+1000
+
+-- !incremental_earliest_9 --
+9000
+
+-- !incremental_8_9 --
+1000
+
+-- !incremental_10_end --
+0
+
+-- !incremental_earliest_10 --
+10000
+
+-- !incremental_9_10 --
+1000
+
diff --git a/regression-test/data/external_table_p2/hudi/test_hudi_schema_evolution.out b/regression-test/data/external_table_p2/hudi/test_hudi_schema_evolution.out
new file mode 100644
index 00000000000..12dd0cf086d
--- /dev/null
+++ b/regression-test/data/external_table_p2/hudi/test_hudi_schema_evolution.out
@@ -0,0 +1,33 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !adding_simple_columns_table --
+20241118012126237      20241118012126237_0_1   1               5166112a-90d8-4ba8-8646-337fbeb2a375-0_0-35-121_20241118012132306.parquet      1       Alice   \N
+20241118012126237      20241118012126237_0_0   2               5166112a-90d8-4ba8-8646-337fbeb2a375-0_0-35-121_20241118012132306.parquet      2       Bob     \N
+20241118012126237      20241118012126237_0_2   3               5166112a-90d8-4ba8-8646-337fbeb2a375-0_0-35-121_20241118012132306.parquet      3       Cathy   \N
+20241118012132306      20241118012132306_0_3   4               5166112a-90d8-4ba8-8646-337fbeb2a375-0_0-35-121_20241118012132306.parquet      4       David   25
+20241118012132306      20241118012132306_0_4   5               5166112a-90d8-4ba8-8646-337fbeb2a375-0_0-35-121_20241118012132306.parquet      5       Eva     30
+20241118012132306      20241118012132306_0_5   6               5166112a-90d8-4ba8-8646-337fbeb2a375-0_0-35-121_20241118012132306.parquet      6       Frank   28
+
+-- !altering_simple_columns_table --
+20241118012136512      20241118012136512_0_0   1               203f0f43-ae9d-4c17-8d5d-834f0dbc62c9-0_0-78-246_20241118012138287.parquet      1       Alice   25.0
+20241118012136512      20241118012136512_0_2   2               203f0f43-ae9d-4c17-8d5d-834f0dbc62c9-0_0-78-246_20241118012138287.parquet      2       Bob     30.0
+20241118012136512      20241118012136512_0_1   3               203f0f43-ae9d-4c17-8d5d-834f0dbc62c9-0_0-78-246_20241118012138287.parquet      3       Cathy   28.0
+20241118012138287      20241118012138287_0_3   4               203f0f43-ae9d-4c17-8d5d-834f0dbc62c9-0_0-78-246_20241118012138287.parquet      4       David   26.0
+20241118012138287      20241118012138287_0_4   5               203f0f43-ae9d-4c17-8d5d-834f0dbc62c9-0_0-78-246_20241118012138287.parquet      5       Eva     31.5
+20241118012138287      20241118012138287_0_5   6               203f0f43-ae9d-4c17-8d5d-834f0dbc62c9-0_0-78-246_20241118012138287.parquet      6       Frank   29.2
+
+-- !adding_complex_columns_table --
+20241118012144831      20241118012144831_0_1   1               3c038df9-a652-4878-9b8a-221ae443448e-0_0-165-497_20241118012146150.parquet     1       Alice   {"age":25, "address":"Guangzhou", "email":null}
+20241118012144831      20241118012144831_0_0   2               3c038df9-a652-4878-9b8a-221ae443448e-0_0-165-497_20241118012146150.parquet     2       Bob     {"age":30, "address":"Shanghai", "email":null}
+20241118012144831      20241118012144831_0_2   3               3c038df9-a652-4878-9b8a-221ae443448e-0_0-165-497_20241118012146150.parquet     3       Cathy   {"age":28, "address":"Beijing", "email":null}
+20241118012146150      20241118012146150_0_3   4               3c038df9-a652-4878-9b8a-221ae443448e-0_0-165-497_20241118012146150.parquet     4       David   {"age":25, "address":"Shenzhen", "email":"da...@example.com"}
+20241118012146150      20241118012146150_0_4   5               3c038df9-a652-4878-9b8a-221ae443448e-0_0-165-497_20241118012146150.parquet     5       Eva     {"age":30, "address":"Chengdu", "email":"e...@example.com"}
+20241118012146150      20241118012146150_0_5   6               3c038df9-a652-4878-9b8a-221ae443448e-0_0-165-497_20241118012146150.parquet     6       Frank   {"age":28, "address":"Wuhan", "email":"fr...@example.com"}
+
+-- !altering_complex_columns_table --
+20241118012147879      20241118012147879_0_0   1               185d101f-a484-45ce-b236-03ccd33c521b-0_0-208-622_20241118012149007.parquet     1       Alice   {"age":25, "address":"Guangzhou"}
+20241118012147879      20241118012147879_0_2   2               185d101f-a484-45ce-b236-03ccd33c521b-0_0-208-622_20241118012149007.parquet     2       Bob     {"age":30, "address":"Shanghai"}
+20241118012147879      20241118012147879_0_1   3               185d101f-a484-45ce-b236-03ccd33c521b-0_0-208-622_20241118012149007.parquet     3       Cathy   {"age":28, "address":"Beijing"}
+20241118012149007      20241118012149007_0_3   4               185d101f-a484-45ce-b236-03ccd33c521b-0_0-208-622_20241118012149007.parquet     4       David   {"age":26, "address":"Shenzhen"}
+20241118012149007      20241118012149007_0_4   5               185d101f-a484-45ce-b236-03ccd33c521b-0_0-208-622_20241118012149007.parquet     5       Eva     {"age":31.5, "address":"Chengdu"}
+20241118012149007      20241118012149007_0_5   6               185d101f-a484-45ce-b236-03ccd33c521b-0_0-208-622_20241118012149007.parquet     6       Frank   {"age":29.2, "address":"Wuhan"}
+
diff --git a/regression-test/data/external_table_p2/hudi/test_hudi_snapshot.out b/regression-test/data/external_table_p2/hudi/test_hudi_snapshot.out
new file mode 100644
index 00000000000..efad67ffbfa
Binary files /dev/null and b/regression-test/data/external_table_p2/hudi/test_hudi_snapshot.out differ
diff --git a/regression-test/data/external_table_p2/hudi/test_hudi_timestamp.out b/regression-test/data/external_table_p2/hudi/test_hudi_timestamp.out
new file mode 100644
index 00000000000..dc47ff86d90
--- /dev/null
+++ b/regression-test/data/external_table_p2/hudi/test_hudi_timestamp.out
@@ -0,0 +1,6 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !timestamp --
+20241115015956800      20241115015956800_0_2   1               eec4913a-0d5f-4b8b-a0f5-934e252c2e45-0_0-7-14_20241115015956800.parquet 1       Alice   2024-10-25T08:00
+20241115015956800      20241115015956800_0_0   2               eec4913a-0d5f-4b8b-a0f5-934e252c2e45-0_0-7-14_20241115015956800.parquet 2       Bob     2024-10-25T09:30:00
+20241115015956800      20241115015956800_0_1   3               eec4913a-0d5f-4b8b-a0f5-934e252c2e45-0_0-7-14_20241115015956800.parquet 3       Charlie 2024-10-25T11:00:00
+
diff --git a/regression-test/data/external_table_p2/hudi/test_hudi_timetravel.out b/regression-test/data/external_table_p2/hudi/test_hudi_timetravel.out
index 38b6ff7846f..a9b5d23595a 100644
--- a/regression-test/data/external_table_p2/hudi/test_hudi_timetravel.out
+++ b/regression-test/data/external_table_p2/hudi/test_hudi_timetravel.out
@@ -1,125 +1,121 @@
 -- This file is automatically generated. You should know what you did if you want to edit this
--- !q00 --
-20240724195843565      20240724195843565_0_0   20240724195843565_0_0   PAR1=para/par2=para     7a788b37-9ef0-409a-bc42-6793e35fcad5-0_0-129-127_20240724195848377.parquet      1       a       b       para    para
-20240724195845718      20240724195845718_0_0   20240724195845718_0_0   PAR1=para/par2=parb     fef19b36-4a18-4d8c-b204-1ed448f7de51-0_0-152-155_20240724195850799.parquet      2       a       b       para    parb
-20240724195848377      20240724195848377_0_1   20240724195848377_0_0   PAR1=para/par2=para     7a788b37-9ef0-409a-bc42-6793e35fcad5-0_0-129-127_20240724195848377.parquet      3       a       b       para    para
-20240724195850799      20240724195850799_0_1   20240724195850799_0_0   PAR1=para/par2=parb     fef19b36-4a18-4d8c-b204-1ed448f7de51-0_0-152-155_20240724195850799.parquet      4       a       b       para    parb
+-- !timetravel1 --
+1000
 
--- !q01 --
+-- !timetravel2 --
+2000
 
--- !q02 --
+-- !timetravel3 --
+3000
 
--- !q01 --
-20240724195843565      20240724195843565_0_0   20240724195843565_0_0   PAR1=para/par2=para     7a788b37-9ef0-409a-bc42-6793e35fcad5-0_0-129-127_20240724195848377.parquet      1       a       b       para    para
-20240724195845718      20240724195845718_0_0   20240724195845718_0_0   PAR1=para/par2=parb     fef19b36-4a18-4d8c-b204-1ed448f7de51-0_0-152-155_20240724195850799.parquet      2       a       b       para    parb
-20240724195848377      20240724195848377_0_1   20240724195848377_0_0   PAR1=para/par2=para     7a788b37-9ef0-409a-bc42-6793e35fcad5-0_0-129-127_20240724195848377.parquet      3       a       b       para    para
-20240724195850799      20240724195850799_0_1   20240724195850799_0_0   PAR1=para/par2=parb     fef19b36-4a18-4d8c-b204-1ed448f7de51-0_0-152-155_20240724195850799.parquet      4       a       b       para    parb
-
--- !q02 --
-20240724195843565      20240724195843565_0_0   20240724195843565_0_0   PAR1=para/par2=para     7a788b37-9ef0-409a-bc42-6793e35fcad5-0_0-129-127_20240724195848377.parquet      1       a       b       para    para
-20240724195845718      20240724195845718_0_0   20240724195845718_0_0   PAR1=para/par2=parb     fef19b36-4a18-4d8c-b204-1ed448f7de51-0_0-152-155_20240724195850799.parquet      2       a       b       para    parb
-20240724195848377      20240724195848377_0_1   20240724195848377_0_0   PAR1=para/par2=para     7a788b37-9ef0-409a-bc42-6793e35fcad5-0_0-129-127_20240724195848377.parquet      3       a       b       para    para
-20240724195850799      20240724195850799_0_1   20240724195850799_0_0   PAR1=para/par2=parb     fef19b36-4a18-4d8c-b204-1ed448f7de51-0_0-152-155_20240724195850799.parquet      4       a       b       para    parb
-
--- !q03 --
-
--- !q04 --
-
--- !q05 --
-1      a       b       para    para
-
--- !q06 --
-1      a       b       para    para
-
--- !q07 --
-1      a       b       para    para
-2      a       b       para    parb
-
--- !q08 --
-1      a       b       para    para
-2      a       b       para    parb
-
--- !q09 --
-1      a       b       para    para
-2      a       b       para    parb
-3      a       b       para    para
-
--- !q10 --
-1      a       b       para    para
-2      a       b       para    parb
-3      a       b       para    para
-
--- !q11 --
-1      a       b       para    para
-2      a       b       para    parb
-3      a       b       para    para
-4      a       b       para    parb
-
--- !q12 --
-1      a       b       para    para
-2      a       b       para    parb
-3      a       b       para    para
-4      a       b       para    parb
-
--- !q50 --
-20240724195853736      20240724195853736_0_0   Id:1    PAR1=para/par2=para     c5a8ebb7-f929-43ba-9f8d-d733fae27605-0_0-203-210_20240724195858450.parquet      1       a       b       para    para
-20240724195856338      20240724195856338_0_0   Id:2    PAR1=para/par2=parb     23756678-cf81-481c-b559-85c0b47b0a80-0_0-219-228_20240724195902682.parquet      2       a       b       para    parb
-20240724195858450      20240724195858450_0_1   Id:3    PAR1=para/par2=para     c5a8ebb7-f929-43ba-9f8d-d733fae27605-0_0-203-210_20240724195858450.parquet      3       a       b       para    para
-20240724195902682      20240724195902682_0_1   Id:4    PAR1=para/par2=parb     23756678-cf81-481c-b559-85c0b47b0a80-0_0-219-228_20240724195902682.parquet      4       a       b       para    parb
-
--- !q51 --
-
--- !q52 --
-
--- !q51 --
-20240724195853736      20240724195853736_0_0   Id:1    PAR1=para/par2=para     c5a8ebb7-f929-43ba-9f8d-d733fae27605-0_0-203-210_20240724195858450.parquet      1       a       b       para    para
-20240724195856338      20240724195856338_0_0   Id:2    PAR1=para/par2=parb     23756678-cf81-481c-b559-85c0b47b0a80-0_0-219-228_20240724195902682.parquet      2       a       b       para    parb
-20240724195858450      20240724195858450_0_1   Id:3    PAR1=para/par2=para     c5a8ebb7-f929-43ba-9f8d-d733fae27605-0_0-203-210_20240724195858450.parquet      3       a       b       para    para
-20240724195902682      20240724195902682_0_1   Id:4    PAR1=para/par2=parb     23756678-cf81-481c-b559-85c0b47b0a80-0_0-219-228_20240724195902682.parquet      4       a       b       para    parb
-
--- !q52 --
-20240724195853736      20240724195853736_0_0   Id:1    PAR1=para/par2=para     c5a8ebb7-f929-43ba-9f8d-d733fae27605-0_0-203-210_20240724195858450.parquet      1       a       b       para    para
-20240724195856338      20240724195856338_0_0   Id:2    PAR1=para/par2=parb     23756678-cf81-481c-b559-85c0b47b0a80-0_0-219-228_20240724195902682.parquet      2       a       b       para    parb
-20240724195858450      20240724195858450_0_1   Id:3    PAR1=para/par2=para     c5a8ebb7-f929-43ba-9f8d-d733fae27605-0_0-203-210_20240724195858450.parquet      3       a       b       para    para
-20240724195902682      20240724195902682_0_1   Id:4    PAR1=para/par2=parb     23756678-cf81-481c-b559-85c0b47b0a80-0_0-219-228_20240724195902682.parquet      4       a       b       para    parb
-
--- !q53 --
-
--- !q54 --
-
--- !q55 --
-1      a       b       para    para
-
--- !q56 --
-1      a       b       para    para
-
--- !q57 --
-1      a       b       para    para
-2      a       b       para    parb
-
--- !q58 --
-1      a       b       para    para
-2      a       b       para    parb
-
--- !q59 --
-1      a       b       para    para
-2      a       b       para    parb
-3      a       b       para    para
-
--- !q60 --
-1      a       b       para    para
-2      a       b       para    parb
-3      a       b       para    para
-
--- !q61 --
-1      a       b       para    para
-2      a       b       para    parb
-3      a       b       para    para
-4      a       b       para    parb
-
--- !q62 --
-1      a       b       para    para
-2      a       b       para    parb
-3      a       b       para    para
-4      a       b       para    parb
+-- !timetravel4 --
+4000
+
+-- !timetravel5 --
+5000
+
+-- !timetravel6 --
+6000
+
+-- !timetravel7 --
+7000
+
+-- !timetravel8 --
+8000
+
+-- !timetravel9 --
+9000
+
+-- !timetravel10 --
+10000
+
+-- !timetravel1 --
+1000
+
+-- !timetravel2 --
+2000
+
+-- !timetravel3 --
+3000
+
+-- !timetravel4 --
+4000
+
+-- !timetravel5 --
+5000
+
+-- !timetravel6 --
+6000
+
+-- !timetravel7 --
+7000
+
+-- !timetravel8 --
+8000
+
+-- !timetravel9 --
+9000
+
+-- !timetravel10 --
+10000
+
+-- !timetravel1 --
+1000
+
+-- !timetravel2 --
+2000
+
+-- !timetravel3 --
+3000
+
+-- !timetravel4 --
+4000
+
+-- !timetravel5 --
+5000
+
+-- !timetravel6 --
+6000
+
+-- !timetravel7 --
+7000
+
+-- !timetravel8 --
+8000
+
+-- !timetravel9 --
+9000
+
+-- !timetravel10 --
+10000
+
+-- !timetravel1 --
+1000
+
+-- !timetravel2 --
+2000
+
+-- !timetravel3 --
+3000
+
+-- !timetravel4 --
+4000
+
+-- !timetravel5 --
+5000
+
+-- !timetravel6 --
+6000
+
+-- !timetravel7 --
+7000
+
+-- !timetravel8 --
+8000
+
+-- !timetravel9 --
+9000
+
+-- !timetravel10 --
+10000
 
diff --git a/regression-test/suites/external_table_p2/hudi/test_hudi_catalog.groovy b/regression-test/suites/external_table_p2/hudi/test_hudi_catalog.groovy
new file mode 100644
index 00000000000..f2082ef89c7
--- /dev/null
+++ b/regression-test/suites/external_table_p2/hudi/test_hudi_catalog.groovy
@@ -0,0 +1,39 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_hudi_catalog", 
"p2,external,hudi,external_remote,external_remote_hudi") {
+    String enabled = context.config.otherConfigs.get("enableExternalHudiTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("disable hudi test")
+        return
+    }
+
+    String catalog_name = "test_hudi_catalog"
+    String props = context.config.otherConfigs.get("hudiEmrCatalog")
+    sql """drop catalog if exists ${catalog_name};"""
+    sql """
+        create catalog if not exists ${catalog_name} properties (
+            ${props}
+        );
+    """
+
+    sql """ switch ${catalog_name};"""
+    sql """ use regression_hudi;""" 
+    sql """ set enable_fallback_to_original_planner=false """
+    def tables = sql """ show tables; """
+    assertTrue(tables.size() > 0)
+    sql """drop catalog if exists ${catalog_name};"""
+}
\ No newline at end of file
diff --git a/regression-test/suites/external_table_p2/hudi/test_hudi_incremental.groovy b/regression-test/suites/external_table_p2/hudi/test_hudi_incremental.groovy
new file mode 100644
index 00000000000..8cc1d2a852b
--- /dev/null
+++ b/regression-test/suites/external_table_p2/hudi/test_hudi_incremental.groovy
@@ -0,0 +1,111 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_hudi_incremental", 
"p2,external,hudi,external_remote,external_remote_hudi") {
+    String enabled = context.config.otherConfigs.get("enableExternalHudiTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("disable hudi test")
+        return
+    }
+
+    String catalog_name = "test_hudi_incremental"
+    String props = context.config.otherConfigs.get("hudiEmrCatalog")
+    sql """drop catalog if exists ${catalog_name};"""
+    sql """
+        create catalog if not exists ${catalog_name} properties (
+            ${props}
+        );
+    """
+
+    sql """ switch ${catalog_name};"""
+    sql """ use regression_hudi;""" 
+    sql """ set enable_fallback_to_original_planner=false """
+
+    def test_hudi_incremental_querys = { table_name, timestamps ->
+        timestamps.eachWithIndex { timestamp, index ->
+            def query_name = "qt_incremental_${index + 1}_end"
+            "${query_name}" """ select count(user_id) from 
${table_name}@incr('beginTime' = '${timestamp}'); """
+            query_name = "qt_incremental_earliest_${index + 1}"
+            "${query_name}" """ select count(user_id) from 
${table_name}@incr('beginTime' = 'earliest', 'endTime' = '${timestamp}'); """
+            if (index > 0) {
+                query_name = "qt_incremental_${index}_${index + 1}"
+                "${query_name}" """ select count(user_id) from 
${table_name}@incr('beginTime' = '${timestamps[index - 1]}', 'endTime' = 
'${timestamp}'); """
+            }
+        }
+    }
+
+    // spark-sql "select distinct _hoodie_commit_time from user_activity_log_cow_non_partition order by _hoodie_commit_time;"
+    def timestamps_cow_non_partition = [
+        "20241114151946599",
+        "20241114151952471",
+        "20241114151956317",
+        "20241114151958164",
+        "20241114152000425",
+        "20241114152004116",
+        "20241114152005954",
+        "20241114152007945",
+        "20241114152009764",
+        "20241114152011901",
+    ]
+    test_hudi_incremental_querys("user_activity_log_cow_non_partition", timestamps_cow_non_partition)
+
+    // spark-sql "select distinct _hoodie_commit_time from user_activity_log_cow_partition order by _hoodie_commit_time;"
+    def timestamps_cow_partition = [
+        "20241114152034850",
+        "20241114152042944",
+        "20241114152052682",
+        "20241114152101650",
+        "20241114152110650",
+        "20241114152120030",
+        "20241114152128871",
+        "20241114152137714",
+        "20241114152147114",
+        "20241114152156417",
+    ]
+    test_hudi_incremental_querys("user_activity_log_cow_partition", timestamps_cow_partition)
+
+    // spark-sql "select distinct _hoodie_commit_time from user_activity_log_mor_non_partition order by _hoodie_commit_time;"
+    def timestamps_mor_non_partition = [
+        "20241114152014186",
+        "20241114152015753",
+        "20241114152017539",
+        "20241114152019371",
+        "20241114152020915",
+        "20241114152022911",
+        "20241114152024706",
+        "20241114152026873",
+        "20241114152028770",
+        "20241114152030746",
+    ]
+    test_hudi_incremental_querys("user_activity_log_mor_non_partition", timestamps_mor_non_partition)
+
+    // spark-sql "select distinct _hoodie_commit_time from user_activity_log_mor_partition order by _hoodie_commit_time;"
+    def timestamps_mor_partition = [
+        "20241114152207700",
+        "20241114152214609",
+        "20241114152223933",
+        "20241114152232579",
+        "20241114152241610",
+        "20241114152252244",
+        "20241114152302763",
+        "20241114152313010",
+        "20241114152323587",
+        "20241114152334111",
+    ]
+    test_hudi_incremental_querys("user_activity_log_mor_partition", timestamps_mor_partition)
+
+    sql """drop catalog if exists ${catalog_name};"""
+}
\ No newline at end of file
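
For reference, the @incr clause driven by the closure above takes a required beginTime and an optional endTime; judging by the expected counts in test_hudi_incremental.out, beginTime is treated as exclusive and endTime as inclusive. A standalone example using the first two cow_non_partition commits listed above:

    // Rows from the commit after 20241114151946599 up to and including 20241114151952471.
    sql """ select count(user_id) from user_activity_log_cow_non_partition@incr(
            'beginTime' = '20241114151946599', 'endTime' = '20241114151952471'); """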
diff --git a/regression-test/suites/external_table_p2/hudi/test_hudi_schema_evolution.groovy b/regression-test/suites/external_table_p2/hudi/test_hudi_schema_evolution.groovy
new file mode 100644
index 00000000000..b247aaf4924
--- /dev/null
+++ b/regression-test/suites/external_table_p2/hudi/test_hudi_schema_evolution.groovy
@@ -0,0 +1,48 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_hudi_schema_evolution", 
"p2,external,hudi,external_remote,external_remote_hudi") {
+    String enabled = context.config.otherConfigs.get("enableExternalHudiTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("disable hudi test")
+        return
+    }
+
+    String catalog_name = "test_hudi_schema_evolution"
+    String props = context.config.otherConfigs.get("hudiEmrCatalog")
+    sql """drop catalog if exists ${catalog_name};"""
+    sql """
+        create catalog if not exists ${catalog_name} properties (
+            ${props}
+        );
+    """
+
+    sql """ switch ${catalog_name};"""
+    sql """ use regression_hudi;""" 
+    sql """ set enable_fallback_to_original_planner=false """
+
+    qt_adding_simple_columns_table """ select * from adding_simple_columns_table order by id """
+    qt_altering_simple_columns_table """ select * from altering_simple_columns_table order by id """
+    // qt_deleting_simple_columns_table """ select * from deleting_simple_columns_table order by id """
+    // qt_renaming_simple_columns_table """ select * from renaming_simple_columns_table order by id """
+
+    qt_adding_complex_columns_table """ select * from adding_complex_columns_table order by id """
+    qt_altering_complex_columns_table """ select * from altering_complex_columns_table order by id """
+    // qt_deleting_complex_columns_table """ select * from deleting_complex_columns_table order by id """
+    // qt_renaming_complex_columns_table """ select * from renaming_complex_columns_table order by id """
+
+    sql """drop catalog if exists ${catalog_name};"""
+}
\ No newline at end of file
diff --git a/regression-test/suites/external_table_p2/hudi/test_hudi_snapshot.groovy b/regression-test/suites/external_table_p2/hudi/test_hudi_snapshot.groovy
new file mode 100644
index 00000000000..53c09e6d5a9
--- /dev/null
+++ b/regression-test/suites/external_table_p2/hudi/test_hudi_snapshot.groovy
@@ -0,0 +1,91 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_hudi_snapshot", 
"p2,external,hudi,external_remote,external_remote_hudi") {
+    String enabled = context.config.otherConfigs.get("enableExternalHudiTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("disable hudi test")
+        return
+    }
+
+    String catalog_name = "test_hudi_snapshot"
+    String props = context.config.otherConfigs.get("hudiEmrCatalog")
+    sql """drop catalog if exists ${catalog_name};"""
+    sql """
+        create catalog if not exists ${catalog_name} properties (
+            ${props}
+        );
+    """
+
+    sql """ switch ${catalog_name};"""
+    sql """ use regression_hudi;""" 
+    sql """ set enable_fallback_to_original_planner=false """
+
+    // Define a Groovy closure that takes table_name as a parameter
+    def test_hudi_snapshot_querys = { table_name ->
+        // Query users by event_time in descending order and limit output
+        qt_q01 """SELECT * FROM ${table_name} ORDER BY event_time DESC LIMIT 10;"""
+
+        // Query all active user records and limit output
+        qt_q02 """SELECT * FROM ${table_name} WHERE is_active = TRUE ORDER BY event_time LIMIT 10;"""
+
+        // Query specific user's activity records and limit output
+        qt_q03 """SELECT * FROM ${table_name} WHERE user_id = '62785e0e-ad44-4321-8b20-9ee4c4daca4a' ORDER BY event_time LIMIT 5;"""
+
+        // Query events within a specific time range and limit output
+        qt_q04 """SELECT * FROM ${table_name} WHERE event_time BETWEEN '2024-01-01 00:00:00' AND '2024-12-31 23:59:59' ORDER BY event_time LIMIT 10;"""
+
+        // Count users by age group and limit output
+        qt_q05 """SELECT age, COUNT(*) AS user_count FROM ${table_name} GROUP BY age ORDER BY user_count DESC LIMIT 5;"""
+
+        // Query users with purchase records and limit output
+        qt_q06 """SELECT user_id, purchases FROM ${table_name} WHERE array_size(purchases) > 0 ORDER BY user_id LIMIT 5;"""
+
+        // Query users with a specific tag and limit output
+        qt_q07 """SELECT * FROM ${table_name} WHERE array_contains(tags, 'others') ORDER BY event_time LIMIT 5;"""
+
+        // Query users living in a specific city and limit output
+        qt_q08 """SELECT * FROM ${table_name} WHERE struct_element(address, 'city') = 'North Rachelview' ORDER BY event_time LIMIT 5;"""
+
+        // Query users within a specific coordinate range and limit output
+        qt_q09 """SELECT * FROM ${table_name} WHERE struct_element(struct_element(address, 'coordinates'), 'latitude') BETWEEN 0 AND 100 AND struct_element(struct_element(address, 'coordinates'), 'longitude') BETWEEN 0 AND 100 ORDER BY event_time LIMIT 5;"""
+
+        // Query records with ratings above a specific value and limit output
+        qt_q10 """SELECT * FROM ${table_name} WHERE rating > 4.5 ORDER BY rating DESC LIMIT 5;"""
+
+        // Query all users' signup dates and limit output
+        qt_q11 """SELECT user_id, signup_date FROM ${table_name} ORDER BY signup_date DESC LIMIT 10;"""
+
+        // Query users with a specific postal code and limit output
+        qt_q12 """SELECT * FROM ${table_name} WHERE struct_element(address, 'postal_code') = '80312' ORDER BY event_time LIMIT 5;"""
+
+        // Query users with profile pictures and limit output
+        qt_q13 """SELECT user_id, profile_picture FROM ${table_name} WHERE profile_picture IS NOT NULL ORDER BY user_id LIMIT 5;"""
+
+        // Query users by signup date and limit output
+        qt_q14 """SELECT * FROM ${table_name} WHERE signup_date = '2024-01-15' ORDER BY user_id LIMIT 5;"""
+
+        // Query the total count of purchases for each user and limit output
+        qt_q15 """SELECT user_id, array_size(purchases) AS purchase_count FROM ${table_name} ORDER BY purchase_count DESC LIMIT 5;"""
+    }
+
+    test_hudi_snapshot_querys("user_activity_log_cow_non_partition")
+    test_hudi_snapshot_querys("user_activity_log_cow_partition")
+    test_hudi_snapshot_querys("user_activity_log_mor_non_partition")
+    test_hudi_snapshot_querys("user_activity_log_mor_partition")
+
+    sql """drop catalog if exists ${catalog_name};"""
+}
\ No newline at end of file
diff --git a/regression-test/suites/external_table_p2/hudi/test_hudi_timestamp.groovy b/regression-test/suites/external_table_p2/hudi/test_hudi_timestamp.groovy
new file mode 100644
index 00000000000..c1ba630e4a7
--- /dev/null
+++ b/regression-test/suites/external_table_p2/hudi/test_hudi_timestamp.groovy
@@ -0,0 +1,62 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_hudi_timestamp", 
"p2,external,hudi,external_remote,external_remote_hudi") {
+    String enabled = context.config.otherConfigs.get("enableExternalHudiTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("disable hudi test")
+        return
+    }
+
+    String catalog_name = "test_hudi_timestamp"
+    String props = context.config.otherConfigs.get("hudiEmrCatalog")
+    sql """drop catalog if exists ${catalog_name};"""
+    sql """
+        create catalog if not exists ${catalog_name} properties (
+            ${props}
+        );
+    """
+
+    sql """ switch ${catalog_name};"""
+    sql """ use regression_hudi;""" 
+    sql """ set enable_fallback_to_original_planner=false """
+
+    // TODO: fix hudi timezone issue and enable this
+    // qt_timestamp """ select * from hudi_table_with_timestamp order by id; """
+
+    sql """drop catalog if exists ${catalog_name};"""
+}
+
+// DROP TABLE IF EXISTS hudi_table_with_timestamp;
+
+// -- create table
+// CREATE TABLE hudi_table_with_timestamp (
+//   id STRING,
+//   name STRING,
+//   event_time TIMESTAMP
+// ) USING HUDI
+// OPTIONS (
+//   type = 'cow',
+//   primaryKey = 'id',
+//   preCombineField = 'event_time'
+// );
+
+// SET TIME ZONE 'America/Los_Angeles';
+
+// INSERT OVERWRITE hudi_table_with_timestamp VALUES
+// ('1', 'Alice', timestamp('2024-10-25 08:00:00')),
+// ('2', 'Bob', timestamp('2024-10-25 09:30:00')),
+// ('3', 'Charlie', timestamp('2024-10-25 11:00:00'));
\ No newline at end of file
diff --git a/regression-test/suites/external_table_p2/hudi/test_hudi_timetravel.groovy b/regression-test/suites/external_table_p2/hudi/test_hudi_timetravel.groovy
index db535e35179..4d458dc4381 100644
--- a/regression-test/suites/external_table_p2/hudi/test_hudi_timetravel.groovy
+++ b/regression-test/suites/external_table_p2/hudi/test_hudi_timetravel.groovy
@@ -16,13 +16,6 @@
 // under the License.
 
 suite("test_hudi_timetravel", 
"p2,external,hudi,external_remote,external_remote_hudi") {
-
-    Boolean ignoreP2 = true;
-    if (ignoreP2) {
-        logger.info("disable p2 test");
-        return;
-    }
-
     String enabled = context.config.otherConfigs.get("enableExternalHudiTest")
     if (enabled == null || !enabled.equalsIgnoreCase("true")) {
         logger.info("disable hudi test")
@@ -37,77 +30,76 @@ suite("test_hudi_timetravel", "p2,external,hudi,external_remote,external_remote_
         );
     """
 
-    sql """switch ${catalog_name};"""
+    sql """ switch ${catalog_name};"""
     sql """ use regression_hudi;""" 
     sql """ set enable_fallback_to_original_planner=false """
 
-    qt_q00 """select * from timetravel_cow order by id"""
-    qt_q01 """select * from timetravel_cow FOR TIME AS OF "2024-07-24" order by id""" // no data
-    qt_q02 """select * from timetravel_cow FOR TIME AS OF "20240724" order by id""" // no data
-    qt_q01 """select * from timetravel_cow FOR TIME AS OF "2024-07-25" order by id"""
-    qt_q02 """select * from timetravel_cow FOR TIME AS OF "20240725" order by id"""
-    qt_q03 """ select id, val1,val2,par1,par2 from timetravel_cow FOR TIME AS OF "2024-07-24 19:58:43" order by id """  // no data
-    qt_q04 """ select id, val1,val2,par1,par2 from timetravel_cow FOR TIME AS OF "20240724195843" order by id """ // no data
-    qt_q05 """ select id, val1,val2,par1,par2 from timetravel_cow FOR TIME AS OF "2024-07-24 19:58:44" order by id """ // one
-    qt_q06 """ select id, val1,val2,par1,par2 from timetravel_cow FOR TIME AS OF "20240724195844" order by id """ //one 
-    qt_q07 """ select id, val1,val2,par1,par2 from timetravel_cow FOR TIME AS OF "2024-07-24 19:58:48" order by id """ // two
-    qt_q08 """ select id, val1,val2,par1,par2 from timetravel_cow FOR TIME AS OF "20240724195848" order by id """ // two
-    qt_q09 """ select id, val1,val2,par1,par2 from timetravel_cow FOR TIME AS OF "2024-07-24 19:58:49" order by id """ // three
-    qt_q10 """ select id, val1,val2,par1,par2 from timetravel_cow FOR TIME AS OF "20240724195849" order by id """ // three
-    qt_q11 """ select id, val1,val2,par1,par2 from timetravel_cow FOR TIME AS OF "2024-07-24 19:58:51" order by id """ // four
-    qt_q12 """ select id, val1,val2,par1,par2 from timetravel_cow FOR TIME AS OF "20240724195851" order by id """ // four
-
-    qt_q50 """select * from timetravel_mor order by id"""
-    qt_q51 """select * from timetravel_mor FOR TIME AS OF "2024-07-24" order by id""" // no data
-    qt_q52 """select * from timetravel_mor FOR TIME AS OF "20240724" order by id""" // no data
-    qt_q51 """select * from timetravel_mor FOR TIME AS OF "2024-07-25" order by id"""
-    qt_q52 """select * from timetravel_mor FOR TIME AS OF "20240725" order by id"""
-    qt_q53 """ select id, val1,val2,par1,par2 from timetravel_mor FOR TIME AS OF "2024-07-24 19:58:53" order by id """  // no data
-    qt_q54 """ select id, val1,val2,par1,par2 from timetravel_mor FOR TIME AS OF "20240724195853" order by id """ // no data
-    qt_q55 """ select id, val1,val2,par1,par2 from timetravel_mor FOR TIME AS OF "2024-07-24 19:58:54" order by id """ // one
-    qt_q56 """ select id, val1,val2,par1,par2 from timetravel_mor FOR TIME AS OF "20240724195854" order by id """ //one 
-    qt_q57 """ select id, val1,val2,par1,par2 from timetravel_mor FOR TIME AS OF "2024-07-24 19:58:58" order by id """ // two
-    qt_q58 """ select id, val1,val2,par1,par2 from timetravel_mor FOR TIME AS OF "20240724195858" order by id """ // two
-    qt_q59 """ select id, val1,val2,par1,par2 from timetravel_mor FOR TIME AS OF "2024-07-24 19:58:59" order by id """ // three
-    qt_q60 """ select id, val1,val2,par1,par2 from timetravel_mor FOR TIME AS OF "20240724195859" order by id """ // three
-    qt_q61 """ select id, val1,val2,par1,par2 from timetravel_mor FOR TIME AS OF "2024-07-24 19:59:03" order by id """ // four
-    qt_q62 """ select id, val1,val2,par1,par2 from timetravel_mor FOR TIME AS OF "20240724195903" order by id """ // four
-}
-
-
-/*
+    def test_hudi_timetravel_querys = { table_name, timestamps ->
+        timestamps.eachWithIndex { timestamp, index ->
+            def query_name = "qt_timetravel${index + 1}"
+            "${query_name}" """ select count(user_id) from ${table_name} for 
time as of "${timestamp}"; """
+        }
+    }
 
-create table timetravel_cow (
-    Id int,
-    VAL1 string,
-    val2 string,
-    PAR1 string,
-    par2 string
-) using hudi
-partitioned by (par1, par2)
-TBLPROPERTIES (
-  'type' = 'cow');
+    // spark-sql "select distinct _hoodie_commit_time from user_activity_log_cow_non_partition order by _hoodie_commit_time;"
+    def timestamps_cow_non_partition = [
+        "20241114151946599",
+        "20241114151952471",
+        "20241114151956317",
+        "20241114151958164",
+        "20241114152000425",
+        "20241114152004116",
+        "20241114152005954",
+        "20241114152007945",
+        "20241114152009764",
+        "20241114152011901",
+    ]
+    test_hudi_timetravel_querys("user_activity_log_cow_non_partition", timestamps_cow_non_partition)
 
-create table timetravel_mor (
-    Id int,
-    VAL1 string,
-    val2 string,
-    PAR1 string,
-    par2 string
-) using hudi
-partitioned by (par1, par2)
-TBLPROPERTIES (
-  'primaryKey' = 'Id',
-  'type' = 'mor');
+    // spark-sql "select distinct _hoodie_commit_time from user_activity_log_cow_partition order by _hoodie_commit_time;"
+    def timestamps_cow_partition = [
+        "20241114152034850",
+        "20241114152042944",
+        "20241114152052682",
+        "20241114152101650",
+        "20241114152110650",
+        "20241114152120030",
+        "20241114152128871",
+        "20241114152137714",
+        "20241114152147114",
+        "20241114152156417",
+    ]
+    test_hudi_timetravel_querys("user_activity_log_cow_partition", timestamps_cow_partition)
 
-insert into timetravel_cow values (1, 'a','b','para','para');
-insert into timetravel_cow values (2, 'a','b','para','parb');
-insert into timetravel_cow values (3, 'a','b','para','para');
-insert into timetravel_cow values (4, 'a','b','para','parb');
+    // spark-sql "select distinct _hoodie_commit_time from user_activity_log_mor_non_partition order by _hoodie_commit_time;"
+    def timestamps_mor_non_partition = [
+        "20241114152014186",
+        "20241114152015753",
+        "20241114152017539",
+        "20241114152019371",
+        "20241114152020915",
+        "20241114152022911",
+        "20241114152024706",
+        "20241114152026873",
+        "20241114152028770",
+        "20241114152030746",
+    ]
+    test_hudi_timetravel_querys("user_activity_log_mor_non_partition", timestamps_mor_non_partition)
 
-insert into timetravel_mor values (1, 'a','b','para','para');
-insert into timetravel_mor values (2, 'a','b','para','parb');
-insert into timetravel_mor values (3, 'a','b','para','para');
-insert into timetravel_mor values (4, 'a','b','para','parb');
+    // spark-sql "select distinct _hoodie_commit_time from user_activity_log_mor_partition order by _hoodie_commit_time;"
+    def timestamps_mor_partition = [
+        "20241114152207700",
+        "20241114152214609",
+        "20241114152223933",
+        "20241114152232579",
+        "20241114152241610",
+        "20241114152252244",
+        "20241114152302763",
+        "20241114152313010",
+        "20241114152323587",
+        "20241114152334111",
+    ]
+    test_hudi_timetravel_querys("user_activity_log_mor_partition", timestamps_mor_partition)
 
-*/
+    sql """drop catalog if exists ${catalog_name};"""
+}
\ No newline at end of file
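
And the time-travel form the rewritten suite exercises, as a standalone statement (commit instant taken from the cow_partition list above; per test_hudi_timetravel.out each instant k returns k*1000 rows, so the bound appears to be inclusive):

    // Snapshot of the table as of the first cow_partition commit.
    sql """ select count(user_id) from user_activity_log_cow_partition for time as of "20241114152034850"; """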

