This is an automated email from the ASF dual-hosted git repository.
casion pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/linkis.git
The following commit(s) were added to refs/heads/master by this push:
new e3c84aa44f fix kafka datasource fixerror & code format (#5291)
e3c84aa44f is described below
commit e3c84aa44fcb45bd290a7d721d58578650a0a951
Author: aiceflower <[email protected]>
AuthorDate: Wed Nov 19 15:09:37 2025 +0800
fix kafka datasource fixerror & code format (#5291)
Co-authored-by: kinghao <[email protected]>
---
linkis-commons/linkis-storage/pom.xml | 352 ++++++++++-----------
.../storage/utils/StorageConfigurationTest.scala | 168 +++++-----
.../service/kafka/pom.xml | 5 +
.../gateway/authentication/dao/TokenDaoTest.java | 132 ++++----
.../service/CachedTokenServiceTest.java | 190 +++++------
5 files changed, 426 insertions(+), 421 deletions(-)
diff --git a/linkis-commons/linkis-storage/pom.xml b/linkis-commons/linkis-storage/pom.xml
index 72ce14950c..95282243fd 100644
--- a/linkis-commons/linkis-storage/pom.xml
+++ b/linkis-commons/linkis-storage/pom.xml
@@ -1,176 +1,176 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one or more
- ~ contributor license agreements. See the NOTICE file distributed with
- ~ this work for additional information regarding copyright ownership.
- ~ The ASF licenses this file to You under the Apache License, Version 2.0
- ~ (the "License"); you may not use this file except in compliance with
- ~ the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>org.apache.linkis</groupId>
- <artifactId>linkis</artifactId>
- <version>${revision}</version>
- <relativePath>../../pom.xml</relativePath>
- </parent>
- <artifactId>linkis-storage</artifactId>
-
- <packaging>jar</packaging>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.linkis</groupId>
- <artifactId>linkis-common</artifactId>
- <version>${project.version}</version>
- </dependency>
-
- <dependency>
- <groupId>org.apache.linkis</groupId>
- <artifactId>linkis-hadoop-common</artifactId>
- <version>${project.version}</version>
- <exclusions>
- <exclusion>
- <groupId>com.google.protobuf</groupId>
- <artifactId>protobuf-java</artifactId>
- </exclusion>
- <exclusion>
- <groupId>io.netty</groupId>
- <artifactId>netty</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
- <dependency>
- <groupId>com.google.protobuf</groupId>
- <artifactId>protobuf-java</artifactId>
- <version>${protobuf.version}</version>
- </dependency>
- <dependency>
- <groupId>org.springframework</groupId>
- <artifactId>spring-core</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.apache.poi</groupId>
- <artifactId>poi</artifactId>
- <version>${poi.version}</version>
- </dependency>
-
- <dependency>
- <groupId>org.apache.poi</groupId>
- <artifactId>poi-ooxml</artifactId>
- <version>${poi.version}</version>
- </dependency>
-
- <dependency>
- <groupId>com.github.pjfanning</groupId>
- <artifactId>excel-streaming-reader</artifactId>
- <version>5.0.2</version>
- </dependency>
-
- <dependency>
- <groupId>org.apache.commons</groupId>
- <artifactId>commons-compress</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-aliyun</artifactId>
- <version>3.3.4</version>
- </dependency>
- <dependency>
- <groupId>com.aliyun.oss</groupId>
- <artifactId>aliyun-sdk-oss</artifactId>
- <version>3.16.0</version>
- </dependency>
- <dependency>
- <groupId>org.jdom</groupId>
- <artifactId>jdom2</artifactId>
- </dependency>
-
- <dependency>
- <groupId>com.amazonaws</groupId>
- <artifactId>aws-java-sdk-s3</artifactId>
- <version>1.12.261</version>
- </dependency>
-
- <dependency>
- <groupId>com.azure</groupId>
- <artifactId>azure-storage-blob</artifactId>
- </dependency>
- <dependency>
- <groupId>com.azure</groupId>
- <artifactId>azure-storage-common</artifactId>
- </dependency>
- <dependency>
- <groupId>com.azure</groupId>
- <artifactId>azure-identity</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.parquet</groupId>
- <artifactId>parquet-avro</artifactId>
- <version>${parquet-avro.version}</version>
- <scope>${storage.parquet.scope}</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-mapreduce-client-core</artifactId>
- <version>${hadoop.version}</version>
- <scope>${storage.parquet.scope}</scope>
- <exclusions>
- <exclusion>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-log4j12</artifactId>
- </exclusion>
- <!-- for hadoop 3.3.3 -->
- <exclusion>
- <groupId>ch.qos.reload4j</groupId>
- <artifactId>reload4j</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-reload4j</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.apache.orc</groupId>
- <artifactId>orc-core</artifactId>
- <version>${orc-core.version}</version>
- <classifier>nohive</classifier>
- <scope>${storage.orc.scope}</scope>
- <exclusions>
- <exclusion>
- <groupId>org.apache.hive</groupId>
- <artifactId>hive-storage-api</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
- </dependencies>
-
- <build>
- <plugins>
- <plugin>
- <groupId>net.alchim31.maven</groupId>
- <artifactId>scala-maven-plugin</artifactId>
- </plugin>
- </plugins>
- </build>
-
-</project>
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one or more
+ ~ contributor license agreements. See the NOTICE file distributed with
+ ~ this work for additional information regarding copyright ownership.
+ ~ The ASF licenses this file to You under the Apache License, Version 2.0
+ ~ (the "License"); you may not use this file except in compliance with
+ ~ the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+
+ <parent>
+ <groupId>org.apache.linkis</groupId>
+ <artifactId>linkis</artifactId>
+ <version>${revision}</version>
+ <relativePath>../../pom.xml</relativePath>
+ </parent>
+ <artifactId>linkis-storage</artifactId>
+
+ <packaging>jar</packaging>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.linkis</groupId>
+ <artifactId>linkis-common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.linkis</groupId>
+ <artifactId>linkis-hadoop-common</artifactId>
+ <version>${project.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>com.google.protobuf</groupId>
+ <artifactId>protobuf-java</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>io.netty</groupId>
+ <artifactId>netty</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+ <dependency>
+ <groupId>com.google.protobuf</groupId>
+ <artifactId>protobuf-java</artifactId>
+ <version>${protobuf.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-core</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.poi</groupId>
+ <artifactId>poi</artifactId>
+ <version>${poi.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.poi</groupId>
+ <artifactId>poi-ooxml</artifactId>
+ <version>${poi.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>com.github.pjfanning</groupId>
+ <artifactId>excel-streaming-reader</artifactId>
+ <version>5.0.2</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-compress</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-aliyun</artifactId>
+ <version>3.3.4</version>
+ </dependency>
+ <dependency>
+ <groupId>com.aliyun.oss</groupId>
+ <artifactId>aliyun-sdk-oss</artifactId>
+ <version>3.16.0</version>
+ </dependency>
+ <dependency>
+ <groupId>org.jdom</groupId>
+ <artifactId>jdom2</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>com.amazonaws</groupId>
+ <artifactId>aws-java-sdk-s3</artifactId>
+ <version>1.12.261</version>
+ </dependency>
+
+ <dependency>
+ <groupId>com.azure</groupId>
+ <artifactId>azure-storage-blob</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>com.azure</groupId>
+ <artifactId>azure-storage-common</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>com.azure</groupId>
+ <artifactId>azure-identity</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.parquet</groupId>
+ <artifactId>parquet-avro</artifactId>
+ <version>${parquet-avro.version}</version>
+ <scope>${storage.parquet.scope}</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop.version}</version>
+ <scope>${storage.parquet.scope}</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>log4j</groupId>
+ <artifactId>log4j</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ </exclusion>
+ <!-- for hadoop 3.3.3 -->
+ <exclusion>
+ <groupId>ch.qos.reload4j</groupId>
+ <artifactId>reload4j</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-reload4j</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.orc</groupId>
+ <artifactId>orc-core</artifactId>
+ <version>${orc-core.version}</version>
+ <classifier>nohive</classifier>
+ <scope>${storage.orc.scope}</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-storage-api</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+ </dependencies>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>net.alchim31.maven</groupId>
+ <artifactId>scala-maven-plugin</artifactId>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
diff --git a/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageConfigurationTest.scala b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageConfigurationTest.scala
index e5adef9124..2d1cff509d 100644
--- a/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageConfigurationTest.scala
+++ b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageConfigurationTest.scala
@@ -1,84 +1,84 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.storage.utils
-
-import org.junit.jupiter.api.{Assertions, DisplayName, Test}
-
-class StorageConfigurationTest {
-
- @Test
- @DisplayName("constTest")
- def constTest(): Unit = {
-
- val storagerootuser = StorageConfiguration.STORAGE_ROOT_USER.getValue
- val hdfsrootuser = StorageConfiguration.HDFS_ROOT_USER.getValue
- val localrootuser = StorageConfiguration.LOCAL_ROOT_USER.getValue
- val storageusergroup = StorageConfiguration.STORAGE_USER_GROUP.getValue
- val storagersfiletype = StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue
- val storagersfilesuffix = StorageConfiguration.STORAGE_RS_FILE_SUFFIX.getValue
- val types = StorageConfiguration.ResultTypes
- val storageresultsetpackage = StorageConfiguration.STORAGE_RESULT_SET_PACKAGE.getValue
- val storageresultsetclasses = StorageConfiguration.STORAGE_RESULT_SET_CLASSES.getValue
- val storagebuildfsclasses = StorageConfiguration.STORAGE_BUILD_FS_CLASSES.getValue
- val issharenode = StorageConfiguration.IS_SHARE_NODE.getValue
- val enableioproxy = StorageConfiguration.ENABLE_IO_PROXY.getValue
- val ioUser = StorageConfiguration.IO_USER.getValue
- val iofsexpiretime = StorageConfiguration.IO_FS_EXPIRE_TIME.getValue
- val iodefaultcreator = StorageConfiguration.IO_DEFAULT_CREATOR.getValue
- val iofsreinit = StorageConfiguration.IO_FS_RE_INIT.getValue
- val ioinitretrylimit = StorageConfiguration.IO_INIT_RETRY_LIMIT.getValue
- val storagehdfsgroup = StorageConfiguration.STORAGE_HDFS_GROUP.getValue
- val doublefractionlen = StorageConfiguration.DOUBLE_FRACTION_LEN.getValue
- val hdfspathprefixcheckon = StorageConfiguration.HDFS_PATH_PREFIX_CHECK_ON.getValue
- val hdfspathprefixremove = StorageConfiguration.HDFS_PATH_PREFIX_REMOVE.getValue
- val fschecksumdisbale = StorageConfiguration.FS_CHECKSUM_DISBALE
-
- Assertions.assertEquals("hadoop", storagerootuser)
- Assertions.assertEquals("hadoop", hdfsrootuser)
- Assertions.assertEquals("root", localrootuser)
- Assertions.assertEquals("bdap", storageusergroup)
- Assertions.assertEquals("utf-8", storagersfiletype)
- Assertions.assertEquals(".dolphin", storagersfilesuffix)
- Assertions.assertTrue(types.size > 0)
- Assertions.assertEquals("org.apache.linkis.storage.resultset",
storageresultsetpackage)
- Assertions.assertEquals(
- "txt.TextResultSet,table.TableResultSet,io.IOResultSet,html.HtmlResultSet,picture.PictureResultSet",
- storageresultsetclasses
- )
- Assertions.assertEquals(
- "org.apache.linkis.storage.factory.impl.BuildHDFSFileSystem,org.apache.linkis.storage.factory.impl.BuildLocalFileSystem," +
- "org.apache.linkis.storage.factory.impl.BuildOSSSystem,org.apache.linkis.storage.factory.impl.BuildS3FileSystem," +
- "org.apache.linkis.storage.factory.impl.BuildAzureBlobFileSystem",
- storagebuildfsclasses
- )
- Assertions.assertTrue(issharenode)
- Assertions.assertFalse(enableioproxy)
- Assertions.assertEquals("root", ioUser)
- Assertions.assertTrue(600000 == iofsexpiretime)
- Assertions.assertEquals("IDE", iodefaultcreator)
- Assertions.assertEquals("re-init", iofsreinit)
- Assertions.assertTrue(10 == ioinitretrylimit)
- Assertions.assertEquals("hadoop", storagehdfsgroup)
- Assertions.assertTrue(30 == doublefractionlen)
- Assertions.assertTrue(hdfspathprefixcheckon)
- Assertions.assertTrue(hdfspathprefixremove)
- Assertions.assertFalse(fschecksumdisbale)
-
- }
-
-}
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.storage.utils
+
+import org.junit.jupiter.api.{Assertions, DisplayName, Test}
+
+class StorageConfigurationTest {
+
+ @Test
+ @DisplayName("constTest")
+ def constTest(): Unit = {
+
+ val storagerootuser = StorageConfiguration.STORAGE_ROOT_USER.getValue
+ val hdfsrootuser = StorageConfiguration.HDFS_ROOT_USER.getValue
+ val localrootuser = StorageConfiguration.LOCAL_ROOT_USER.getValue
+ val storageusergroup = StorageConfiguration.STORAGE_USER_GROUP.getValue
+ val storagersfiletype = StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue
+ val storagersfilesuffix = StorageConfiguration.STORAGE_RS_FILE_SUFFIX.getValue
+ val types = StorageConfiguration.ResultTypes
+ val storageresultsetpackage = StorageConfiguration.STORAGE_RESULT_SET_PACKAGE.getValue
+ val storageresultsetclasses = StorageConfiguration.STORAGE_RESULT_SET_CLASSES.getValue
+ val storagebuildfsclasses = StorageConfiguration.STORAGE_BUILD_FS_CLASSES.getValue
+ val issharenode = StorageConfiguration.IS_SHARE_NODE.getValue
+ val enableioproxy = StorageConfiguration.ENABLE_IO_PROXY.getValue
+ val ioUser = StorageConfiguration.IO_USER.getValue
+ val iofsexpiretime = StorageConfiguration.IO_FS_EXPIRE_TIME.getValue
+ val iodefaultcreator = StorageConfiguration.IO_DEFAULT_CREATOR.getValue
+ val iofsreinit = StorageConfiguration.IO_FS_RE_INIT.getValue
+ val ioinitretrylimit = StorageConfiguration.IO_INIT_RETRY_LIMIT.getValue
+ val storagehdfsgroup = StorageConfiguration.STORAGE_HDFS_GROUP.getValue
+ val doublefractionlen = StorageConfiguration.DOUBLE_FRACTION_LEN.getValue
+ val hdfspathprefixcheckon = StorageConfiguration.HDFS_PATH_PREFIX_CHECK_ON.getValue
+ val hdfspathprefixremove = StorageConfiguration.HDFS_PATH_PREFIX_REMOVE.getValue
+ val fschecksumdisbale = StorageConfiguration.FS_CHECKSUM_DISBALE
+
+ Assertions.assertEquals("hadoop", storagerootuser)
+ Assertions.assertEquals("hadoop", hdfsrootuser)
+ Assertions.assertEquals("root", localrootuser)
+ Assertions.assertEquals("bdap", storageusergroup)
+ Assertions.assertEquals("utf-8", storagersfiletype)
+ Assertions.assertEquals(".dolphin", storagersfilesuffix)
+ Assertions.assertTrue(types.size > 0)
+ Assertions.assertEquals("org.apache.linkis.storage.resultset",
storageresultsetpackage)
+ Assertions.assertEquals(
+ "txt.TextResultSet,table.TableResultSet,io.IOResultSet,html.HtmlResultSet,picture.PictureResultSet",
+ storageresultsetclasses
+ )
+ Assertions.assertEquals(
+ "org.apache.linkis.storage.factory.impl.BuildHDFSFileSystem,org.apache.linkis.storage.factory.impl.BuildLocalFileSystem," +
+ "org.apache.linkis.storage.factory.impl.BuildOSSSystem,org.apache.linkis.storage.factory.impl.BuildS3FileSystem," +
+ "org.apache.linkis.storage.factory.impl.BuildAzureBlobFileSystem",
+ storagebuildfsclasses
+ )
+ Assertions.assertTrue(issharenode)
+ Assertions.assertFalse(enableioproxy)
+ Assertions.assertEquals("root", ioUser)
+ Assertions.assertTrue(600000 == iofsexpiretime)
+ Assertions.assertEquals("IDE", iodefaultcreator)
+ Assertions.assertEquals("re-init", iofsreinit)
+ Assertions.assertTrue(10 == ioinitretrylimit)
+ Assertions.assertEquals("hadoop", storagehdfsgroup)
+ Assertions.assertTrue(30 == doublefractionlen)
+ Assertions.assertTrue(hdfspathprefixcheckon)
+ Assertions.assertTrue(hdfspathprefixremove)
+ Assertions.assertFalse(fschecksumdisbale)
+
+ }
+
+}
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/kafka/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/kafka/pom.xml
index 38b1a438e8..ed8723c536 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/kafka/pom.xml
+++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/kafka/pom.xml
@@ -51,6 +51,11 @@
<artifactId>kafka-clients</artifactId>
<version>3.9.1</version>
</dependency>
+
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ </dependency>
</dependencies>
<build>
diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/java/org/apache/linkis/gateway/authentication/dao/TokenDaoTest.java b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/java/org/apache/linkis/gateway/authentication/dao/TokenDaoTest.java
index 203ea9f903..a118be121c 100644
--- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/java/org/apache/linkis/gateway/authentication/dao/TokenDaoTest.java
+++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/java/org/apache/linkis/gateway/authentication/dao/TokenDaoTest.java
@@ -1,66 +1,66 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.gateway.authentication.dao;
-
-import org.apache.linkis.common.conf.CommonVars;
-import org.apache.linkis.common.conf.Configuration;
-import org.apache.linkis.gateway.authentication.entity.TokenEntity;
-
-import org.apache.commons.lang3.StringUtils;
-
-import org.springframework.beans.factory.annotation.Autowired;
-
-import java.util.List;
-
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotEquals;
-
-class TokenDaoTest extends BaseDaoTest {
-
- private static final Logger logger = LoggerFactory.getLogger(BaseDaoTest.class);
-
- private static String TokenName =
- CommonVars.apply("wds.linkis.bml.auth.token.value", Configuration.LINKIS_TOKEN().getValue())
- .getValue();
-
- @Autowired TokenDao tokenDao;
-
- @BeforeAll
- static void before() {
- if (StringUtils.isBlank(TokenName)) {
- TokenName = "LINKIS-UNAVAILABLE-TOKE";
- }
- }
-
- @Test
- void testSelectTokenByName() {
- TokenEntity result = tokenDao.selectTokenByName(TokenName);
- assertEquals(result.getTokenName(), TokenName);
- }
-
- @Test
- void testGetAllTokens() {
- List<TokenEntity> result = tokenDao.getAllTokens();
- assertNotEquals(result.size(), 0);
- }
-}
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.gateway.authentication.dao;
+
+import org.apache.linkis.common.conf.CommonVars;
+import org.apache.linkis.common.conf.Configuration;
+import org.apache.linkis.gateway.authentication.entity.TokenEntity;
+
+import org.apache.commons.lang3.StringUtils;
+
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.List;
+
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+
+class TokenDaoTest extends BaseDaoTest {
+
+ private static final Logger logger = LoggerFactory.getLogger(BaseDaoTest.class);
+
+ private static String TokenName =
+ CommonVars.apply("wds.linkis.bml.auth.token.value", Configuration.LINKIS_TOKEN().getValue())
+ .getValue();
+
+ @Autowired TokenDao tokenDao;
+
+ @BeforeAll
+ static void before() {
+ if (StringUtils.isBlank(TokenName)) {
+ TokenName = "LINKIS-UNAVAILABLE-TOKE";
+ }
+ }
+
+ @Test
+ void testSelectTokenByName() {
+ TokenEntity result = tokenDao.selectTokenByName(TokenName);
+ assertEquals(result.getTokenName(), TokenName);
+ }
+
+ @Test
+ void testGetAllTokens() {
+ List<TokenEntity> result = tokenDao.getAllTokens();
+ assertNotEquals(result.size(), 0);
+ }
+}
diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/java/org/apache/linkis/gateway/authentication/service/CachedTokenServiceTest.java b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/java/org/apache/linkis/gateway/authentication/service/CachedTokenServiceTest.java
index f16024a4f6..a551f9ff13 100644
--- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/java/org/apache/linkis/gateway/authentication/service/CachedTokenServiceTest.java
+++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/java/org/apache/linkis/gateway/authentication/service/CachedTokenServiceTest.java
@@ -1,95 +1,95 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.gateway.authentication.service;
-
-import org.apache.linkis.common.conf.CommonVars;
-import org.apache.linkis.common.conf.Configuration;
-import org.apache.linkis.gateway.authentication.Scan;
-import org.apache.linkis.gateway.authentication.WebApplicationServer;
-import org.apache.linkis.gateway.authentication.exception.TokenAuthException;
-
-import org.apache.commons.lang3.StringUtils;
-
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.test.context.junit.jupiter.SpringExtension;
-
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.ExtendWith;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import static org.junit.jupiter.api.Assertions.*;
-
-@ExtendWith(SpringExtension.class)
-@SpringBootTest(classes = {WebApplicationServer.class, Scan.class})
-public class CachedTokenServiceTest {
- private static final Logger logger = LoggerFactory.getLogger(CachedTokenServiceTest.class);
-
- private static String TokenName =
- CommonVars.apply("wds.linkis.bml.auth.token.value", Configuration.LINKIS_TOKEN().getValue())
- .getValue();
-
- @Autowired CachedTokenService tokenService;
-
- @BeforeAll
- static void before() {
- if (StringUtils.isBlank(TokenName)) {
- TokenName = "LINKIS-UNAVAILABLE-TOKE";
- }
- }
-
- @Test
- void testIsTokenValid() {
- boolean isOk = tokenService.isTokenValid(TokenName);
- assertTrue(isOk);
- }
-
- @Test
- void testIsTokenAcceptableWithUser() {
- boolean isOk = tokenService.isTokenAcceptableWithUser(TokenName, "test");
- assertTrue(isOk);
- isOk = tokenService.isTokenAcceptableWithUser(TokenName, "test1");
- assertFalse(isOk);
- }
-
- @Test
- void testIsTokenAcceptableWithHost() {
- boolean isOk = tokenService.isTokenAcceptableWithHost(TokenName, "127.0.0.1");
- assertTrue(isOk);
- isOk = tokenService.isTokenAcceptableWithHost(TokenName, "10.10.10.10");
- assertFalse(isOk);
- }
-
- @Test
- void testDoAuth() {
- boolean isOk = tokenService.doAuth(TokenName, "test", "127.0.0.1");
- assertTrue(isOk);
-
- Exception exception =
- assertThrows(
- TokenAuthException.class, () -> tokenService.doAuth(TokenName, "test1", "127.0.0.1"));
- logger.info("assertThrows:{}", exception.getMessage());
-
- exception =
- assertThrows(
- TokenAuthException.class, () -> tokenService.doAuth(TokenName, "test", "10.10.10.10"));
- logger.info("assertThrows:{}", exception.getMessage());
- }
-}
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.gateway.authentication.service;
+
+import org.apache.linkis.common.conf.CommonVars;
+import org.apache.linkis.common.conf.Configuration;
+import org.apache.linkis.gateway.authentication.Scan;
+import org.apache.linkis.gateway.authentication.WebApplicationServer;
+import org.apache.linkis.gateway.authentication.exception.TokenAuthException;
+
+import org.apache.commons.lang3.StringUtils;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.test.context.junit.jupiter.SpringExtension;
+
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+@ExtendWith(SpringExtension.class)
+@SpringBootTest(classes = {WebApplicationServer.class, Scan.class})
+public class CachedTokenServiceTest {
+ private static final Logger logger = LoggerFactory.getLogger(CachedTokenServiceTest.class);
+
+ private static String TokenName =
+ CommonVars.apply("wds.linkis.bml.auth.token.value", Configuration.LINKIS_TOKEN().getValue())
+ .getValue();
+
+ @Autowired CachedTokenService tokenService;
+
+ @BeforeAll
+ static void before() {
+ if (StringUtils.isBlank(TokenName)) {
+ TokenName = "LINKIS-UNAVAILABLE-TOKE";
+ }
+ }
+
+ @Test
+ void testIsTokenValid() {
+ boolean isOk = tokenService.isTokenValid(TokenName);
+ assertTrue(isOk);
+ }
+
+ @Test
+ void testIsTokenAcceptableWithUser() {
+ boolean isOk = tokenService.isTokenAcceptableWithUser(TokenName, "test");
+ assertTrue(isOk);
+ isOk = tokenService.isTokenAcceptableWithUser(TokenName, "test1");
+ assertFalse(isOk);
+ }
+
+ @Test
+ void testIsTokenAcceptableWithHost() {
+ boolean isOk = tokenService.isTokenAcceptableWithHost(TokenName, "127.0.0.1");
+ assertTrue(isOk);
+ isOk = tokenService.isTokenAcceptableWithHost(TokenName, "10.10.10.10");
+ assertFalse(isOk);
+ }
+
+ @Test
+ void testDoAuth() {
+ boolean isOk = tokenService.doAuth(TokenName, "test", "127.0.0.1");
+ assertTrue(isOk);
+
+ Exception exception =
+ assertThrows(
+ TokenAuthException.class, () -> tokenService.doAuth(TokenName, "test1", "127.0.0.1"));
+ logger.info("assertThrows:{}", exception.getMessage());
+
+ exception =
+ assertThrows(
+ TokenAuthException.class, () -> tokenService.doAuth(TokenName, "test", "10.10.10.10"));
+ logger.info("assertThrows:{}", exception.getMessage());
+ }
+}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]