This is an automated email from the ASF dual-hosted git repository.
dataroaring pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-3.0 by this push:
new 230774bdf6f branch-3.0: [test](kerberos)Add hdfs(tvf),outfile and
export test (#48433) (#48814)
230774bdf6f is described below
commit 230774bdf6ff6cf93b15ca92f0bfaf5ec69f6326
Author: Calvin Kirs <[email protected]>
AuthorDate: Mon Mar 10 12:18:09 2025 +0800
branch-3.0: [test](kerberos)Add hdfs(tvf),outfile and export test (#48433)
(#48814)
#48433
Add hdfs(tvf),outfile and export test
(cherry picked from commit 48fa09096d695c1f760b04ceb834789b1fde63dc)
---
.../kerberos/test_non_catalog_kerberos.out | Bin 0 -> 135 bytes
.../pipeline/external/conf/regression-conf.groovy | 1 +
.../kerberos/test_non_catalog_kerberos.groovy | 146 +++++++++++++++++++++
3 files changed, 147 insertions(+)
diff --git
a/regression-test/data/external_table_p0/kerberos/test_non_catalog_kerberos.out
b/regression-test/data/external_table_p0/kerberos/test_non_catalog_kerberos.out
new file mode 100644
index 00000000000..0e5a0d69003
Binary files /dev/null and
b/regression-test/data/external_table_p0/kerberos/test_non_catalog_kerberos.out
differ
diff --git a/regression-test/pipeline/external/conf/regression-conf.groovy
b/regression-test/pipeline/external/conf/regression-conf.groovy
index f8a705f111c..5ffbb020c61 100644
--- a/regression-test/pipeline/external/conf/regression-conf.groovy
+++ b/regression-test/pipeline/external/conf/regression-conf.groovy
@@ -171,3 +171,4 @@ enableTrinoConnectorTest = true
enableKerberosTest = true
kerberosHmsPort=9883
kerberosHdfsPort=8820
+enableNonCatalogKerberosTest = true
diff --git
a/regression-test/suites/external_table_p0/kerberos/test_non_catalog_kerberos.groovy
b/regression-test/suites/external_table_p0/kerberos/test_non_catalog_kerberos.groovy
new file mode 100644
index 00000000000..2c4da8b2d20
--- /dev/null
+++
b/regression-test/suites/external_table_p0/kerberos/test_non_catalog_kerberos.groovy
@@ -0,0 +1,146 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import org.awaitility.Awaitility;
+import static java.util.concurrent.TimeUnit.SECONDS;
+
+suite("test_non_catalog_kerberos",
"p0,external,kerberos,external_docker,external_docker_kerberos") {
+ String enabled =
context.config.otherConfigs.get("enableNonCatalogKerberosTest")
+ if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+ return
+ }
+ def String hms_catalog_name = "hms_catalog_kerberos_test_export"
+ def String test_tbl_name="hms_test_table"
+ def keytab_root_dir = "/keytabs"
+ String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+ sql """
+ drop catalog if exists ${hms_catalog_name}
+ """
+ sql """
+ CREATE CATALOG IF NOT EXISTS ${hms_catalog_name}
+ PROPERTIES (
+ "type" = "hms",
+ "ipc.client.fallback-to-simple-auth-allowed" = "true",
+ "hive.metastore.uris" = "thrift://${externalEnvIp}:9583",
+ "hive.metastore.sasl.enabled" = "true",
+ "hive.metastore.kerberos.principal" =
"hive/[email protected]",
+ "hadoop.security.authentication" = "kerberos",
+ "hadoop.security.auth_to_local" =
"RULE:[2:\$1@\$0](.*@LABS.TERADATA.COM)s/@.*//
+
RULE:[2:\$1@\$0](.*@OTHERLABS.TERADATA.COM)s/@.*//
+
RULE:[2:\$1@\$0](.*@OTHERREALM.COM)s/@.*//
+ DEFAULT",
+ "hadoop.kerberos.principal" =
"hive/[email protected]",
+ "hadoop.kerberos.min.seconds.before.relogin" = "5",
+ "hadoop.kerberos.keytab.login.autorenewal.enabled" = "false",
+ "hadoop.kerberos.keytab" =
"${keytab_root_dir}/hive-presto-master.keytab",
+ "fs.defaultFS" = "hdfs://${externalEnvIp}:8520"
+ );
+ """
+
+ sql """ switch ${hms_catalog_name} """
+ sql """ use test_krb_hive_db """
+ sql """ drop table if exists ${test_tbl_name}"""
+ sql """
+ CREATE TABLE `${test_tbl_name}` (
+ `c_tinyint` tinyint(4) NULL COMMENT "",
+ `c_smallint` smallint(6) NULL COMMENT ""
+ ) ;
+ """
+ sql """
+ insert into ${test_tbl_name} values(1,2);
+ """
+ qt_select1 "select * from ${test_tbl_name} "
+ def export_task_label= "export_kerberos_test"+ System.currentTimeMillis()
+
+ sql """
+ EXPORT TABLE ${test_tbl_name}
+ TO "hdfs://${externalEnvIp}:8520/user/test/export_"
+ PROPERTIES
+ (
+ "line_delimiter" = ",",
+ "label"="${export_task_label}"
+ )
+ with HDFS (
+ "fs.defaultFS" = "hdfs://${externalEnvIp}:8520",
+ "hadoop.security.auth_to_local" =
"RULE:[2:\\\$1@\\\$0](.*@LABS.TERADATA.COM)s/@.*//
+
RULE:[2:\\\$1@\\\$0](.*@OTHERLABS.TERADATA.COM)s/@.*//
+
RULE:[2:\\\$1@\\\$0](.*@OTHERREALM.COM)s/@.*//
+ DEFAULT",
+ "hadoop.kerberos.min.seconds.before.relogin" = "5",
+ "hadoop.security.authentication" = "kerberos",
+ "hadoop.kerberos.keytab.login.autorenewal.enabled"="false",
+
"hadoop.kerberos.principal"="hive/[email protected]",
+ "hadoop.kerberos.keytab" =
"${keytab_root_dir}/hive-presto-master.keytab"
+ );
+ """
+
+ def outfile_result=sql """
+ SELECT * FROM ${test_tbl_name}
+ INTO OUTFILE "hdfs://${externalEnvIp}:8520/user/to/outfile_"
+ FORMAT AS CSV
+ PROPERTIES(
+ "fs.defaultFS" = "hdfs://${externalEnvIp}:8520",
+ "hadoop.security.auth_to_local" =
"RULE:[2:\\\$1@\\\$0](.*@LABS.TERADATA.COM)s/@.*//
+
RULE:[2:\\\$1@\\\$0](.*@OTHERLABS.TERADATA.COM)s/@.*//
+
RULE:[2:\\\$1@\\\$0](.*@OTHERREALM.COM)s/@.*//
+ DEFAULT",
+ "hadoop.kerberos.min.seconds.before.relogin" = "5",
+ "hadoop.security.authentication" = "kerberos",
+ "hadoop.kerberos.keytab.login.autorenewal.enabled"="false",
+
"hadoop.kerberos.principal"="hive/[email protected]",
+ "hadoop.kerberos.keytab" =
"${keytab_root_dir}/hive-presto-master.keytab"
+ )
+ """
+
+ println(outfile_result)
+ def hdfslink=outfile_result.get(0).get(3)
+ println hdfslink
+ qt_select1 """
+ select * from hdfs(
+ "uri" = "${hdfslink}",
+ "hadoop.username" = "doris",
+ "format" = "csv",
+ "fs.defaultFS" = "hdfs://${externalEnvIp}:8520",
+ "hadoop.security.auth_to_local" =
"RULE:[2:\\\$1@\\\$0](.*@LABS.TERADATA.COM)s/@.*//
+
RULE:[2:\\\$1@\\\$0](.*@OTHERLABS.TERADATA.COM)s/@.*//
+
RULE:[2:\\\$1@\\\$0](.*@OTHERREALM.COM)s/@.*//
+ DEFAULT",
+ "hadoop.kerberos.min.seconds.before.relogin" = "5",
+ "hadoop.security.authentication" = "kerberos",
+ "hadoop.kerberos.keytab.login.autorenewal.enabled"="false",
+
"hadoop.kerberos.principal"="hive/[email protected]",
+ "hadoop.kerberos.keytab" =
"${keytab_root_dir}/hive-presto-master.keytab"
+ )
+
+ """
+ Awaitility.await("query-export-task-result-test").atMost(60,
SECONDS).pollInterval(5, SECONDS).until(
+ {
+ sql """ switch ${hms_catalog_name} """
+ sql """ use test_krb_hive_db """
+ def res = sql """ show export where label =
"${export_task_label}" """
+ if (res[0][2] == "FINISHED") {
+ return true
+ } else if (res[0][2] == "CANCELLED") {
+ throw new IllegalStateException("""export failed:
${res[0][10]}""")
+ } else {
+ return false
+ }
+ }
+ )
+
+
+}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]