This is an automated email from the ASF dual-hosted git repository.
morningman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push:
new c99eb5d80fd [fix](multi-catalog)add properties converter fe ut (#27254)
c99eb5d80fd is described below
commit c99eb5d80fdc0ebd570376073b38b457dcc0c3c3
Author: slothever <[email protected]>
AuthorDate: Wed Nov 29 19:01:29 2023 +0800
[fix](multi-catalog)add properties converter fe ut (#27254)
---
.../java/org/apache/doris/common/util/S3Util.java | 1 +
.../org/apache/doris/common/util/S3UtilTest.java | 104 +++++++++++++++
.../datasource/property/PropertyConverterTest.java | 140 +++++++++++++++++++++
3 files changed, 245 insertions(+)
diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/util/S3Util.java b/fe/fe-core/src/main/java/org/apache/doris/common/util/S3Util.java
index adf86cf73de..98790bc9e83 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/common/util/S3Util.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/common/util/S3Util.java
@@ -131,6 +131,7 @@ public class S3Util {
         if (StringUtils.isNotEmpty(host)) {
             // Replace 'hdfs://key/' to 'hdfs://name_service/key/'
             // Or hdfs:///abc to hdfs://name_service/abc
+            // TODO: check host in path when the 'dfs.nameservices' has multiple hosts
             return location.replace(normalizedPrefix, normalizedPrefix + host + "/");
         } else {
             // 'hdfs://null/' equals the 'hdfs:///'
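
For reference, the rewrite this hunk touches can be sketched in isolation. The snippet below is a hypothetical, self-contained re-creation inferred from the comments and from the new S3UtilTest, not the actual S3Util code; names like PrefixRewriteSketch and withNameService are assumptions.

    public class PrefixRewriteSketch {
        // Insert the name service configured in 'dfs.nameservices' behind the
        // scheme, or drop the scheme when no name service is configured.
        static String withNameService(String normalizedLocation, String host) {
            String normalizedPrefix = "hdfs:///";
            if (host != null && !host.isEmpty()) {
                // "hdfs:///abc" -> "hdfs://ns/abc"; as the new TODO notes, a
                // multi-host 'dfs.nameservices' value would need a containment
                // check before this blind replace.
                return normalizedLocation.replace(normalizedPrefix, "hdfs://" + host + "/");
            }
            // 'hdfs://null/' equals the 'hdfs:///', so fall back to a bare path.
            return normalizedLocation.substring("hdfs://".length());
        }

        public static void main(String[] args) {
            System.out.println(withNameService("hdfs:///dir/file.path", "ns")); // hdfs://ns/dir/file.path
            System.out.println(withNameService("hdfs:///dir/file.path", ""));   // /dir/file.path
        }
    }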
diff --git a/fe/fe-core/src/test/java/org/apache/doris/common/util/S3UtilTest.java b/fe/fe-core/src/test/java/org/apache/doris/common/util/S3UtilTest.java
new file mode 100644
index 00000000000..70bad23e01f
--- /dev/null
+++ b/fe/fe-core/src/test/java/org/apache/doris/common/util/S3UtilTest.java
@@ -0,0 +1,104 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.common.util;
+
+import org.apache.doris.fs.FileSystemFactory;
+import org.apache.doris.fs.FileSystemType;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class S3UtilTest {
+    @Test
+    public void testLocationConvert() {
+        String loc;
+        loc = S3Util.convertToS3IfNecessary("hdfs://dir/file.path", new HashMap<>());
+        Assertions.assertTrue(loc.startsWith("hdfs://"));
+
+        Map<String, String> props = new HashMap<>();
+        props.put("dfs.nameservices", "ns");
+        loc = S3Util.convertToS3IfNecessary("hdfs:///dir/file.path", props);
+        Assertions.assertTrue(loc.startsWith("hdfs://") && !loc.startsWith("hdfs:///"));
+        loc = S3Util.convertToS3IfNecessary("hdfs:/dir/file.path", props);
+        Assertions.assertTrue(loc.startsWith("hdfs://"));
+        props.put("dfs.nameservices", "");
+        loc = S3Util.convertToS3IfNecessary("hdfs:/dir/file.path", props);
+        Assertions.assertTrue(loc.startsWith("/dir") && !loc.startsWith("hdfs://"));
+
+        loc = S3Util.convertToS3IfNecessary("oss://test.com", props);
+        Assertions.assertTrue(loc.startsWith("oss://"));
+
+        loc = S3Util.convertToS3IfNecessary("gcs://test.com", props);
+        Assertions.assertTrue(loc.startsWith("gcs://"));
+
+        loc = S3Util.convertToS3IfNecessary("cos://test.com", props);
+        Assertions.assertTrue(loc.startsWith("cos://"));
+
+        loc = S3Util.convertToS3IfNecessary("cosn://test.com", props);
+        Assertions.assertTrue(loc.startsWith("cosn://"));
+
+        loc = S3Util.convertToS3IfNecessary("obs://test.com", props);
+        Assertions.assertTrue(loc.startsWith("obs://"));
+    }
+
+
+    @Test
+    public void testScanRangeLocationConvert() throws Exception {
+        String loc;
+        Map<String, String> rangeProps = new HashMap<>();
+        loc = S3Util.toScanRangeLocation("hdfs://dir/file.path", rangeProps).toString();
+        Assertions.assertTrue(loc.startsWith("hdfs://"));
+        Assertions.assertEquals(FileSystemFactory.getFSIdentity(loc, null).first, FileSystemType.DFS);
+
+        Map<String, String> props = new HashMap<>();
+        props.put("dfs.nameservices", "ns");
+        loc = S3Util.toScanRangeLocation("hdfs:///dir/file.path", props).toString();
+        Assertions.assertTrue(loc.startsWith("hdfs://") && !loc.startsWith("hdfs:///"));
+        loc = S3Util.toScanRangeLocation("hdfs:/dir/file.path", props).toString();
+        Assertions.assertTrue(loc.startsWith("hdfs://"));
+        props.put("dfs.nameservices", "");
+        loc = S3Util.toScanRangeLocation("hdfs:/dir/file.path", props).toString();
+        Assertions.assertTrue(loc.startsWith("/dir") && !loc.startsWith("hdfs://"));
+
+        loc = S3Util.toScanRangeLocation("oss://test.com", rangeProps).toString();
+        Assertions.assertTrue(loc.startsWith("s3://"));
+        Assertions.assertEquals(FileSystemFactory.getFSIdentity(loc, null).first, FileSystemType.S3);
+
+        loc = S3Util.toScanRangeLocation("oss://test.oss-dls.aliyuncs.com/path", rangeProps).toString();
+        Assertions.assertTrue(loc.startsWith("oss://test.oss-dls.aliyuncs"));
+        Assertions.assertEquals(FileSystemFactory.getFSIdentity(loc, null).first, FileSystemType.DFS);
+
+        loc = S3Util.toScanRangeLocation("cos://test.com", rangeProps).toString();
+        Assertions.assertTrue(loc.startsWith("s3://"));
+        Assertions.assertEquals(FileSystemFactory.getFSIdentity(loc, null).first, FileSystemType.S3);
+
+        loc = S3Util.toScanRangeLocation("cosn://test.com", rangeProps).toString();
+        Assertions.assertTrue(loc.startsWith("cosn://"));
+        Assertions.assertEquals(FileSystemFactory.getFSIdentity(loc, null).first, FileSystemType.OFS);
+
+        loc = S3Util.toScanRangeLocation("obs://test.com", rangeProps).toString();
+        Assertions.assertTrue(loc.startsWith("s3://"));
+        Assertions.assertEquals(FileSystemFactory.getFSIdentity(loc, null).first, FileSystemType.S3);
+
+        loc = S3Util.toScanRangeLocation("unknown://test.com", rangeProps).toString();
+        Assertions.assertTrue(loc.startsWith("unknown://"));
+    }
+}
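
Taken together, the two tests pin down a scheme dispatch: convertToS3IfNecessary leaves oss/gcs/cos/cosn/obs locations untouched, while toScanRangeLocation maps oss, cos and obs onto s3, keeps cosn and oss-dls (OSS-HDFS) locations on their DFS-style schemes, and passes unknown schemes through. Below is a hypothetical sketch of that mapping; only the input/output pairs come from the test, while the name convertScanRangeScheme and the structure are assumptions (the real logic lives in S3Util.toScanRangeLocation).

    import java.net.URI;

    public class ScanRangeSchemeSketch {
        static String convertScanRangeScheme(String location) {
            URI uri = URI.create(location);
            String scheme = uri.getScheme();
            String host = uri.getHost() == null ? "" : uri.getHost();
            switch (scheme) {
                case "oss":
                    if (host.contains("oss-dls.aliyuncs")) {
                        return location; // OSS-HDFS endpoints stay on the DFS path
                    }
                    // plain OSS falls through and is read via the S3 client
                case "cos":
                case "obs":
                    return "s3" + location.substring(scheme.length());
                default:
                    // cosn, gcs and unknown schemes pass through; the hdfs
                    // name-service handling shown earlier is omitted here.
                    return location;
            }
        }

        public static void main(String[] args) {
            System.out.println(convertScanRangeScheme("oss://test.com"));  // s3://test.com
            System.out.println(convertScanRangeScheme("cosn://test.com")); // cosn://test.com
        }
    }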
diff --git a/fe/fe-core/src/test/java/org/apache/doris/datasource/property/PropertyConverterTest.java b/fe/fe-core/src/test/java/org/apache/doris/datasource/property/PropertyConverterTest.java
index 153f989b50b..0695bc2f6c1 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/datasource/property/PropertyConverterTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/datasource/property/PropertyConverterTest.java
@@ -35,8 +35,11 @@ import org.apache.doris.common.Pair;
 import org.apache.doris.common.UserException;
 import org.apache.doris.common.jmockit.Deencapsulation;
 import org.apache.doris.datasource.HMSExternalCatalog;
+import org.apache.doris.datasource.MaxComputeExternalCatalog;
+import org.apache.doris.datasource.iceberg.IcebergExternalCatalog;
 import org.apache.doris.datasource.property.constants.CosProperties;
 import org.apache.doris.datasource.property.constants.GCSProperties;
+import org.apache.doris.datasource.property.constants.HMSProperties;
 import org.apache.doris.datasource.property.constants.MinioProperties;
 import org.apache.doris.datasource.property.constants.ObsProperties;
 import org.apache.doris.datasource.property.constants.OssProperties;
@@ -46,6 +49,7 @@ import org.apache.doris.tablefunction.S3TableValuedFunction;
 import org.apache.doris.thrift.TFileFormatType;
 import org.apache.doris.utframe.TestWithFeService;
+import com.aliyun.datalake.metastore.common.DataLakeConfig;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Maps;
 import org.junit.jupiter.api.Assertions;
@@ -285,6 +289,136 @@ public class PropertyConverterTest extends TestWithFeService {
         Assertions.assertEquals(hdProps.size(), 20);
     }
+    @Test
+    public void testOssHdfsProperties() throws Exception {
+        String catalogName1 = "hms_oss_hdfs";
+        String query1 = "create catalog " + catalogName1 + " properties (\n"
+                + "    'type'='hms',\n"
+                + "    'hive.metastore.uris' = 'thrift://172.21.0.1:7004',\n"
+                + "    'oss.endpoint' = 'oss-cn-beijing.aliyuncs.com',\n"
+                + "    'oss.hdfs.enabled' = 'true',\n"
+                + "    'oss.access_key' = 'akk',\n"
+                + "    'oss.secret_key' = 'skk'\n"
+                + ");";
+        String catalogName = "hms_oss_hdfs";
+        CreateCatalogStmt analyzedStmt = createStmt(query1);
+        HMSExternalCatalog catalog = createAndGetCatalog(analyzedStmt, catalogName);
+        Map<String, String> hdProps = catalog.getCatalogProperty().getHadoopProperties();
+        Assertions.assertEquals("com.aliyun.jindodata.oss.JindoOssFileSystem", hdProps.get("fs.oss.impl"));
+        Assertions.assertEquals("cn-beijing.oss-dls.aliyuncs.com", hdProps.get("fs.oss.endpoint"));
+    }
+
+    @Test
+    public void testDlfPropertiesConverter() throws Exception {
+        String queryDlf1 = "create catalog hms_dlf1 properties (\n"
+                + "    'type'='hms',\n"
+                + "    'hive.metastore.type'='dlf',\n"
+                + "    'dlf.proxy.mode' = 'DLF_ONLY',\n"
+                + "    'dlf.endpoint' = 'dlf.cn-beijing.aliyuncs.com',\n"
+                + "    'dlf.uid' = '20239444',\n"
+                + "    'dlf.access_key' = 'akk',\n"
+                + "    'dlf.secret_key' = 'skk',\n"
+                + "    'dlf.region' = 'cn-beijing',\n"
+                + "    'dlf.access.public' = 'false'\n"
+                + ");";
+        String catalogName = "hms_dlf1";
+        CreateCatalogStmt analyzedStmt = createStmt(queryDlf1);
+        HMSExternalCatalog catalog = createAndGetCatalog(analyzedStmt, catalogName);
+        Map<String, String> properties = catalog.getCatalogProperty().getProperties();
+        Assertions.assertEquals("hms", properties.get("type"));
+        Assertions.assertEquals("dlf", properties.get(HMSProperties.HIVE_METASTORE_TYPE));
+        Assertions.assertEquals("akk", properties.get(DataLakeConfig.CATALOG_ACCESS_KEY_ID));
+        Assertions.assertEquals("skk", properties.get(DataLakeConfig.CATALOG_ACCESS_KEY_SECRET));
+        Assertions.assertEquals("dlf.cn-beijing.aliyuncs.com", properties.get(DataLakeConfig.CATALOG_ENDPOINT));
+        Assertions.assertEquals("cn-beijing", properties.get(DataLakeConfig.CATALOG_REGION_ID));
+        Assertions.assertEquals("20239444", properties.get(DataLakeConfig.CATALOG_USER_ID));
+
+        Map<String, String> hdProps = catalog.getCatalogProperty().getHadoopProperties();
+        Assertions.assertEquals("akk", hdProps.get(OssProperties.ACCESS_KEY));
+        Assertions.assertEquals("skk", hdProps.get(OssProperties.SECRET_KEY));
+        Assertions.assertEquals("http://oss-cn-beijing-internal.aliyuncs.com",
+                hdProps.get(OssProperties.ENDPOINT));
+
+        String queryDlf2 = "create catalog hms_dlf2 properties (\n"
+                + "    'type'='hms',\n"
+                + "    'hive.metastore.type'='dlf',\n"
+                + "    'dlf.catalog.endpoint' = 'dlf.cn-beijing.aliyuncs.com',\n"
+                + "    'dlf.catalog.uid' = '20239444',\n"
+                + "    'dlf.catalog.id' = 'catalogId',\n"
+                + "    'dlf.catalog.proxyMode' = 'DLF_ONLY',\n"
+                + "    'dlf.catalog.accessKeyId' = 'akk',\n"
+                + "    'dlf.catalog.accessKeySecret' = 'skk',\n"
+                + "    'dlf.catalog.region' = 'cn-beijing',\n"
+                + "    'oss.hdfs.enabled' = 'true',\n"
+                + "    'dlf.catalog.accessPublic' = 'true'\n"
+                + ");";
+        String catalogName2 = "hms_dlf2";
+        CreateCatalogStmt analyzedStmt2 = createStmt(queryDlf2);
+        HMSExternalCatalog catalog2 = createAndGetCatalog(analyzedStmt2, catalogName2);
+        Map<String, String> properties2 = catalog2.getCatalogProperty().getProperties();
+        Assertions.assertEquals("dlf", properties2.get(HMSProperties.HIVE_METASTORE_TYPE));
+        Assertions.assertEquals("akk", properties2.get(DataLakeConfig.CATALOG_ACCESS_KEY_ID));
+        Assertions.assertEquals("skk", properties2.get(DataLakeConfig.CATALOG_ACCESS_KEY_SECRET));
+        Assertions.assertEquals("dlf.cn-beijing.aliyuncs.com", properties2.get(DataLakeConfig.CATALOG_ENDPOINT));
+        Assertions.assertEquals("cn-beijing", properties2.get(DataLakeConfig.CATALOG_REGION_ID));
+        Assertions.assertEquals("20239444", properties2.get(DataLakeConfig.CATALOG_USER_ID));
+
+        Map<String, String> hdProps2 = catalog2.getCatalogProperty().getHadoopProperties();
+        Assertions.assertEquals("akk", hdProps2.get(OssProperties.ACCESS_KEY));
+        Assertions.assertEquals("skk", hdProps2.get(OssProperties.SECRET_KEY));
+        Assertions.assertEquals("cn-beijing.oss-dls.aliyuncs.com", hdProps2.get(OssProperties.ENDPOINT));
+
+        String queryDlfIceberg = "create catalog dlf_iceberg properties (\n"
+                + "    'type'='iceberg',\n"
+                + "    'iceberg.catalog.type'='dlf',\n"
+                + "    'dlf.proxy.mode' = 'DLF_ONLY',\n"
+                + "    'dlf.endpoint' = 'dlf.cn-beijing.aliyuncs.com',\n"
+                + "    'dlf.uid' = '20239444',\n"
+                + "    'dlf.access_key' = 'akk',\n"
+                + "    'dlf.secret_key' = 'skk',\n"
+                + "    'dlf.region' = 'cn-beijing'\n"
+                + ");";
+        String catalogName3 = "dlf_iceberg";
+        CreateCatalogStmt analyzedStmt3 = createStmt(queryDlfIceberg);
+        IcebergExternalCatalog catalog3 = createAndGetIcebergCatalog(analyzedStmt3, catalogName3);
+        Map<String, String> properties3 = catalog3.getCatalogProperty().getProperties();
+        Assertions.assertEquals("dlf", properties3.get(IcebergExternalCatalog.ICEBERG_CATALOG_TYPE));
+        Assertions.assertEquals("akk", properties3.get(DataLakeConfig.CATALOG_ACCESS_KEY_ID));
+        Assertions.assertEquals("skk", properties3.get(DataLakeConfig.CATALOG_ACCESS_KEY_SECRET));
+        Assertions.assertEquals("dlf.cn-beijing.aliyuncs.com", properties3.get(DataLakeConfig.CATALOG_ENDPOINT));
+        Assertions.assertEquals("cn-beijing", properties3.get(DataLakeConfig.CATALOG_REGION_ID));
+        Assertions.assertEquals("20239444", properties3.get(DataLakeConfig.CATALOG_USER_ID));
+
+        Map<String, String> hdProps3 = catalog3.getCatalogProperty().getHadoopProperties();
+        Assertions.assertEquals("akk", hdProps3.get(OssProperties.ACCESS_KEY));
+        Assertions.assertEquals("skk", hdProps3.get(OssProperties.SECRET_KEY));
+        Assertions.assertEquals("http://oss-cn-beijing-internal.aliyuncs.com", hdProps3.get(OssProperties.ENDPOINT));
+    }
+
+    @Test
+    public void testMcPropertiesConverter() throws Exception {
+        String queryDlf1 = "create catalog hms_mc properties (\n"
+                + "    'type'='max_compute',\n"
+                + "    'mc.default.project' = 'project0',\n"
+                + "    'mc.region' = 'cn-beijing',\n"
+                + "    'mc.access_key' = 'ak',\n"
+                + "    'mc.secret_key' = 'sk',\n"
+                + "    'mc.public_access' = 'true'\n"
+                + ");";
+        String catalogName = "hms_mc";
+        CreateCatalogStmt analyzedStmt = createStmt(queryDlf1);
+        Env.getCurrentEnv().getCatalogMgr().createCatalog(analyzedStmt);
+        MaxComputeExternalCatalog catalog = (MaxComputeExternalCatalog) Env.getCurrentEnv()
+                .getCatalogMgr().getCatalog(catalogName);
+        Map<String, String> properties = catalog.getCatalogProperty().getProperties();
+        Assertions.assertEquals(properties.get("type"), "max_compute");
+        Assertions.assertEquals(properties.get("mc.region"), "cn-beijing");
+        Assertions.assertEquals(properties.get("mc.access_key"), "ak");
+        Assertions.assertEquals(properties.get("mc.secret_key"), "sk");
+        Assertions.assertEquals(properties.get("mc.public_access"), "true");
+        Assertions.assertEquals(properties.get("mc.default.project"), "project0");
+    }
+
     @Test
     public void testGlueCatalogPropertiesConverter() throws Exception {
         String queryOld = "create catalog hms_glue_old properties (\n"
@@ -407,6 +541,12 @@ public class PropertyConverterTest extends TestWithFeService {
         return (HMSExternalCatalog) Env.getCurrentEnv().getCatalogMgr().getCatalog(name);
     }
+    private static IcebergExternalCatalog createAndGetIcebergCatalog(CreateCatalogStmt analyzedStmt, String name)
+            throws UserException {
+        Env.getCurrentEnv().getCatalogMgr().createCatalog(analyzedStmt);
+        return (IcebergExternalCatalog) Env.getCurrentEnv().getCatalogMgr().getCatalog(name);
+    }
+
     @Test
     public void testSerialization() throws Exception {
         MetaContext metaContext = new MetaContext();
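
One detail worth calling out from testDlfPropertiesConverter and testOssHdfsProperties: the converter derives the OSS endpoint handed to Hadoop from the DLF region plus two flags, and the assertions fix the VPC-internal case (http://oss-cn-beijing-internal.aliyuncs.com) and the OSS-HDFS case (cn-beijing.oss-dls.aliyuncs.com). Below is a hypothetical sketch of that derivation; the method and parameter names are assumptions, and only the two asserted outputs come from the tests.

    public class OssEndpointSketch {
        // Pick the OSS endpoint for a DLF-backed catalog. The two outputs
        // asserted in PropertyConverterTest are known; the public
        // (non-internal) branch is an unverified assumption.
        static String ossEndpoint(String region, boolean publicAccess, boolean ossHdfsEnabled) {
            if (ossHdfsEnabled) {
                return region + ".oss-dls.aliyuncs.com"; // OSS-HDFS (JindoFS) endpoint
            }
            String suffix = publicAccess ? "" : "-internal";
            return "http://oss-" + region + suffix + ".aliyuncs.com";
        }

        public static void main(String[] args) {
            System.out.println(ossEndpoint("cn-beijing", false, false)); // http://oss-cn-beijing-internal.aliyuncs.com
            System.out.println(ossEndpoint("cn-beijing", true, true));   // cn-beijing.oss-dls.aliyuncs.com
        }
    }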