This is an automated email from the ASF dual-hosted git repository.
casion pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/linkis.git
The following commit(s) were added to refs/heads/master by this push:
new 19961164d4 Release 1.8.0 code merge (#5269)
19961164d4 is described below
commit 19961164d4f3d38c4a9b13bc10fc6b011730cf7b
Author: aiceflower <[email protected]>
AuthorDate: Fri Oct 17 17:18:44 2025 +0800
Release 1.8.0 code merge (#5269)
* update version to 1.8.0
* Upgrade `LINKIS_VERSION` to `1.8.0` to Fix Integration Test Failure in
GitHub Actions (#5250)
* build(ci): update Linkis version to 1.8.0
Signed-off-by: kazutoiris <[email protected]>
* build(ci): update Docker publish workflow to use the current repository
Signed-off-by: kazutoiris <[email protected]>
---------
Signed-off-by: kazutoiris <[email protected]>
* Fix KIND image loading, script typo, and cache directory creation (#5251)
* fix(ci): pass `USING_KIND` variable to `install-mysql.sh`
Signed-off-by: kazutoiris <[email protected]>
* fix(ci): correct typo in script name
Signed-off-by: kazutoiris <[email protected]>
* fix(ci): create `TAR_CACHE_ROOT` directory if not exists
Signed-off-by: kazutoiris <[email protected]>
---------
Signed-off-by: kazutoiris <[email protected]>
* chore: prepare to release 1.8.0 (#5254)
Signed-off-by: kazutoiris <[email protected]>
* support azure (#5214)
* support azure
* remove file
* add azure conf
---------
Co-authored-by: “v_kkhuang” <“[email protected]”>
* Add OAuth2 authentication support (#5253)
* feat(mg-gateway): add OAuth2 authentication support
- Add OAuth2 authentication configuration to GatewayConfiguration
- Implement OAuth2Authentication
- Update `SecurityFilter` and `UserRestful` to process OAuth2 requests
Signed-off-by: kazutoiris <[email protected]>
* feat(mg-gateway): add OAuth configuration
- Add OAuth-related properties to `linkis-mg-gateway.properties`
- Include support for GitHub OAuth as an example
Signed-off-by: kazutoiris <[email protected]>
* style: reformat code
Signed-off-by: kazutoiris <[email protected]>
* feat(mg-gateway): add OAuth in frontend
- Add OAuth login option to the login page
- Implement OAuth callback route and component
- Add translations for OAuth login text
Signed-off-by: kazutoiris <[email protected]>
* docs: add OAuth authentication documentation
---------
Signed-off-by: kazutoiris <[email protected]>
* fix azure compile error (#5264)
* fix azure compile error
* fix azure compile error
* fix storage test error
---------
Co-authored-by: aiceflower <[email protected]>
* remove default token (#5265)
* fix compile error
* fix token security
* fix token security
---------
Co-authored-by: aiceflower <[email protected]>
* fix token security (#5266)
Co-authored-by: aiceflower <[email protected]>
---------
Signed-off-by: kazutoiris <[email protected]>
Co-authored-by: Casion <[email protected]>
Co-authored-by: Kazuto Iris <[email protected]>
Co-authored-by: v-kkhuang <[email protected]>
Co-authored-by: “v_kkhuang” <“[email protected]”>
Co-authored-by: aiceflower <[email protected]>
---
.github/workflows/integration-test.yml | 2 +-
.github/workflows/publish-docker.yaml | 4 +-
docs/configuration/linkis-gateway-core.md | 8 +
.../apache/linkis/common/conf/Configuration.scala | 2 +-
linkis-commons/linkis-storage/pom.xml | 340 ++++++++--------
.../factory/impl/BuildAzureBlobFileSystem.java | 61 +++
.../storage/fs/impl/AzureBlobFileSystem.java | 427 +++++++++++++++++++++
.../storage/utils/StorageConfiguration.scala | 6 +-
.../apache/linkis/storage/utils/StorageUtils.scala | 4 +-
.../storage/utils/StorageConfigurationTest.scala | 162 ++++----
.../src/test/resources/conf/linkis-cli.properties | 2 +-
.../src/test/resources/linkis-cli.properties | 2 +-
.../linkis/ujes/client/JobObserveActionTest.scala | 4 +-
linkis-dist/bin/install-linkis-to-kubernetes.sh | 2 +-
linkis-dist/bin/install.sh | 100 ++++-
linkis-dist/deploy-config/linkis-env.sh | 2 +-
linkis-dist/docker/ldh.Dockerfile | 2 +-
linkis-dist/docker/linkis.Dockerfile | 4 +-
...dbc.sh => make-linkis-image-with-mysql-jdbc.sh} | 0
linkis-dist/docker/scripts/utils.sh | 2 +
.../linkis/templates/configmap-init-sql.yaml | 4 +-
.../linkis/templates/configmap-linkis-config.yaml | 4 +-
linkis-dist/package/admin/configuration_helper.sh | 6 +-
.../package/conf/linkis-cli/linkis-cli.properties | 2 +-
.../package/conf/linkis-mg-gateway.properties | 9 +
linkis-dist/package/conf/linkis.properties | 24 +-
linkis-dist/package/db/linkis_dml.sql | 14 +-
linkis-dist/package/db/linkis_dml_pg.sql | 12 +-
linkis-dist/package/db/module/linkis-mg.sql | 17 +-
.../spark/config/SparkConfiguration.scala | 2 +-
.../spark/executor/TestSparkSqlExecutor.scala | 52 +--
.../gateway/authentication/dao/TokenDaoTest.java | 120 +++---
.../service/CachedTokenServiceTest.java | 178 +++++----
.../src/test/resources/create.sql | 8 +-
.../src/test/resources/create_pg.sql | 10 +-
.../gateway/config/GatewayConfiguration.scala | 9 +
.../linkis/gateway/security/SecurityFilter.scala | 3 +
.../linkis/gateway/security/UserRestful.scala | 15 +
.../security/oauth/OAuth2Authentication.scala | 340 ++++++++++++++++
linkis-web/.env | 2 +-
linkis-web/package.json | 2 +-
linkis-web/src/common/i18n/en.json | 1 +
linkis-web/src/common/i18n/zh.json | 1 +
linkis-web/src/dss/router.js | 10 +
linkis-web/src/dss/view/login/index.vue | 21 +-
linkis-web/src/dss/view/login/oauthCallback.vue | 55 +++
pom.xml | 10 +-
47 files changed, 1561 insertions(+), 506 deletions(-)
diff --git a/.github/workflows/integration-test.yml
b/.github/workflows/integration-test.yml
index 8a26905abe..f99f8c30e6 100644
--- a/.github/workflows/integration-test.yml
+++ b/.github/workflows/integration-test.yml
@@ -45,7 +45,7 @@ jobs:
TAG: ${{ github.sha }}
SKIP_TEST: true
HUB: ghcr.io/apache/linkis
- LINKIS_VERSION: 1.7.0
+ LINKIS_VERSION: 1.8.0
steps:
- name: Free up disk space
run: |
diff --git a/.github/workflows/publish-docker.yaml
b/.github/workflows/publish-docker.yaml
index d9199040d8..1b7c675a56 100644
--- a/.github/workflows/publish-docker.yaml
+++ b/.github/workflows/publish-docker.yaml
@@ -33,8 +33,8 @@ jobs:
env:
TAG: ${{ github.sha }}
SKIP_TEST: true
- HUB: ghcr.io/apache/linkis
- LINKIS_VERSION: 1.7.0
+ HUB: ghcr.io/${{ github.repository }}
+ LINKIS_VERSION: 1.8.0
steps:
- name: Checkout
uses: actions/checkout@v4
diff --git a/docs/configuration/linkis-gateway-core.md
b/docs/configuration/linkis-gateway-core.md
index be933b2a26..5a4f55a3d1 100644
--- a/docs/configuration/linkis-gateway-core.md
+++ b/docs/configuration/linkis-gateway-core.md
@@ -36,3 +36,11 @@
|linkis-gateway-core|wds.linkis.gateway.this.schema| | gateway.this.schema|
|linkis-gateway-core|wds.linkis.web.enable.water.mark|true| web.enable.water.mark|
|linkis-gateway-core|wds.linkis.entrance.name| |linkis.entrance.name|
+|linkis-gateway-core|wds.linkis.gateway.conf.enable.oauth.auth| false |wds.linkis.gateway.conf.enable.oauth.auth|
+|linkis-gateway-core|wds.linkis.gateway.auth.oauth.authentication.url| |wds.linkis.gateway.auth.oauth.authentication.url|
+|linkis-gateway-core|wds.linkis.gateway.auth.oauth.exchange.url| |wds.linkis.gateway.auth.oauth.exchange.url|
+|linkis-gateway-core|wds.linkis.gateway.auth.oauth.validate.url| |wds.linkis.gateway.auth.oauth.validate.url|
+|linkis-gateway-core|wds.linkis.gateway.auth.oauth.validate.field| |wds.linkis.gateway.auth.oauth.validate.field|
+|linkis-gateway-core|wds.linkis.gateway.auth.oauth.client.id| |wds.linkis.gateway.auth.oauth.client.id|
+|linkis-gateway-core|wds.linkis.gateway.auth.oauth.client.secret| |wds.linkis.gateway.auth.oauth.client.secret|
+|linkis-gateway-core|wds.linkis.gateway.auth.oauth.scope| |wds.linkis.gateway.auth.oauth.scope|
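For reference, the keys documented above follow the same CommonVars pattern used
elsewhere in Linkis configuration objects. The real declarations live in
GatewayConfiguration.scala (listed in the diffstat above); the sketch below is only
an assumption about their shape, with illustrative names and defaults:

    import org.apache.linkis.common.conf.CommonVars

    // Illustrative only; names and defaults are assumptions, not copied from the commit.
    object OAuthGatewayConfSketch {
      val ENABLE_OAUTH_AUTH = CommonVars("wds.linkis.gateway.conf.enable.oauth.auth", false)
      val OAUTH_AUTHENTICATION_URL = CommonVars("wds.linkis.gateway.auth.oauth.authentication.url", "")
      val OAUTH_EXCHANGE_URL = CommonVars("wds.linkis.gateway.auth.oauth.exchange.url", "")
      val OAUTH_VALIDATE_URL = CommonVars("wds.linkis.gateway.auth.oauth.validate.url", "")
      val OAUTH_VALIDATE_FIELD = CommonVars("wds.linkis.gateway.auth.oauth.validate.field", "")
      val OAUTH_CLIENT_ID = CommonVars("wds.linkis.gateway.auth.oauth.client.id", "")
      val OAUTH_CLIENT_SECRET = CommonVars("wds.linkis.gateway.auth.oauth.client.secret", "")
      val OAUTH_SCOPE = CommonVars("wds.linkis.gateway.auth.oauth.scope", "")

      // Reading a value at runtime, e.g. inside the gateway security filter:
      def oauthEnabled: Boolean = ENABLE_OAUTH_AUTH.getValue
    }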
diff --git
a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/Configuration.scala
b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/Configuration.scala
index 822bc2aa07..16cac1d204 100644
---
a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/Configuration.scala
+++
b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/Configuration.scala
@@ -81,7 +81,7 @@ object Configuration extends Logging {
"The request interface %s is abnormal. You can try to troubleshoot
common problems in the knowledge base document"
)
- val LINKIS_TOKEN = CommonVars("wds.linkis.token", "LINKIS-AUTH")
+ val LINKIS_TOKEN = CommonVars("wds.linkis.token", "")
val GLOBAL_CONF_CHN_NAME = "全局设置"
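Because the shipped default is now empty, clients that previously relied on the
built-in LINKIS-AUTH value must set wds.linkis.token explicitly (the install.sh
changes later in this diff generate per-deployment tokens). A minimal sketch of the
effect, not part of the commit:

    import org.apache.linkis.common.conf.Configuration

    object TokenCheckSketch extends App {
      // Resolves to "" unless wds.linkis.token is set in linkis.properties.
      val token = Configuration.LINKIS_TOKEN.getValue
      require(token.nonEmpty, "wds.linkis.token must be configured after upgrading to 1.8.0")
    }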
diff --git a/linkis-commons/linkis-storage/pom.xml
b/linkis-commons/linkis-storage/pom.xml
index 6e04016fa7..72ce14950c 100644
--- a/linkis-commons/linkis-storage/pom.xml
+++ b/linkis-commons/linkis-storage/pom.xml
@@ -1,164 +1,176 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one or more
- ~ contributor license agreements. See the NOTICE file distributed with
- ~ this work for additional information regarding copyright ownership.
- ~ The ASF licenses this file to You under the Apache License, Version 2.0
- ~ (the "License"); you may not use this file except in compliance with
- ~ the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>org.apache.linkis</groupId>
- <artifactId>linkis</artifactId>
- <version>${revision}</version>
- <relativePath>../../pom.xml</relativePath>
- </parent>
- <artifactId>linkis-storage</artifactId>
-
- <packaging>jar</packaging>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.linkis</groupId>
- <artifactId>linkis-common</artifactId>
- <version>${project.version}</version>
- </dependency>
-
- <dependency>
- <groupId>org.apache.linkis</groupId>
- <artifactId>linkis-hadoop-common</artifactId>
- <version>${project.version}</version>
- <exclusions>
- <exclusion>
- <groupId>com.google.protobuf</groupId>
- <artifactId>protobuf-java</artifactId>
- </exclusion>
- <exclusion>
- <groupId>io.netty</groupId>
- <artifactId>netty</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
- <dependency>
- <groupId>com.google.protobuf</groupId>
- <artifactId>protobuf-java</artifactId>
- <version>${protobuf.version}</version>
- </dependency>
- <dependency>
- <groupId>org.springframework</groupId>
- <artifactId>spring-core</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.apache.poi</groupId>
- <artifactId>poi</artifactId>
- <version>${poi.version}</version>
- </dependency>
-
- <dependency>
- <groupId>org.apache.poi</groupId>
- <artifactId>poi-ooxml</artifactId>
- <version>${poi.version}</version>
- </dependency>
-
- <dependency>
- <groupId>com.github.pjfanning</groupId>
- <artifactId>excel-streaming-reader</artifactId>
- <version>5.0.2</version>
- </dependency>
-
- <dependency>
- <groupId>org.apache.commons</groupId>
- <artifactId>commons-compress</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-aliyun</artifactId>
- <version>3.3.4</version>
- </dependency>
- <dependency>
- <groupId>com.aliyun.oss</groupId>
- <artifactId>aliyun-sdk-oss</artifactId>
- <version>3.16.0</version>
- </dependency>
- <dependency>
- <groupId>org.jdom</groupId>
- <artifactId>jdom2</artifactId>
- </dependency>
-
- <dependency>
- <groupId>com.amazonaws</groupId>
- <artifactId>aws-java-sdk-s3</artifactId>
- <version>1.12.261</version>
- </dependency>
-
- <dependency>
- <groupId>org.apache.parquet</groupId>
- <artifactId>parquet-avro</artifactId>
- <version>${parquet-avro.version}</version>
- <scope>${storage.parquet.scope}</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-mapreduce-client-core</artifactId>
- <version>${hadoop.version}</version>
- <scope>${storage.parquet.scope}</scope>
- <exclusions>
- <exclusion>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-log4j12</artifactId>
- </exclusion>
- <!-- for hadoop 3.3.3 -->
- <exclusion>
- <groupId>ch.qos.reload4j</groupId>
- <artifactId>reload4j</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-reload4j</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.apache.orc</groupId>
- <artifactId>orc-core</artifactId>
- <version>${orc-core.version}</version>
- <classifier>nohive</classifier>
- <scope>${storage.orc.scope}</scope>
- <exclusions>
- <exclusion>
- <groupId>org.apache.hive</groupId>
- <artifactId>hive-storage-api</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
- </dependencies>
-
- <build>
- <plugins>
- <plugin>
- <groupId>net.alchim31.maven</groupId>
- <artifactId>scala-maven-plugin</artifactId>
- </plugin>
- </plugins>
- </build>
-
-</project>
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one or more
+ ~ contributor license agreements. See the NOTICE file distributed with
+ ~ this work for additional information regarding copyright ownership.
+ ~ The ASF licenses this file to You under the Apache License, Version 2.0
+ ~ (the "License"); you may not use this file except in compliance with
+ ~ the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+
+ <parent>
+ <groupId>org.apache.linkis</groupId>
+ <artifactId>linkis</artifactId>
+ <version>${revision}</version>
+ <relativePath>../../pom.xml</relativePath>
+ </parent>
+ <artifactId>linkis-storage</artifactId>
+
+ <packaging>jar</packaging>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.linkis</groupId>
+ <artifactId>linkis-common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.linkis</groupId>
+ <artifactId>linkis-hadoop-common</artifactId>
+ <version>${project.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>com.google.protobuf</groupId>
+ <artifactId>protobuf-java</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>io.netty</groupId>
+ <artifactId>netty</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+ <dependency>
+ <groupId>com.google.protobuf</groupId>
+ <artifactId>protobuf-java</artifactId>
+ <version>${protobuf.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-core</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.poi</groupId>
+ <artifactId>poi</artifactId>
+ <version>${poi.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.poi</groupId>
+ <artifactId>poi-ooxml</artifactId>
+ <version>${poi.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>com.github.pjfanning</groupId>
+ <artifactId>excel-streaming-reader</artifactId>
+ <version>5.0.2</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-compress</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-aliyun</artifactId>
+ <version>3.3.4</version>
+ </dependency>
+ <dependency>
+ <groupId>com.aliyun.oss</groupId>
+ <artifactId>aliyun-sdk-oss</artifactId>
+ <version>3.16.0</version>
+ </dependency>
+ <dependency>
+ <groupId>org.jdom</groupId>
+ <artifactId>jdom2</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>com.amazonaws</groupId>
+ <artifactId>aws-java-sdk-s3</artifactId>
+ <version>1.12.261</version>
+ </dependency>
+
+ <dependency>
+ <groupId>com.azure</groupId>
+ <artifactId>azure-storage-blob</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>com.azure</groupId>
+ <artifactId>azure-storage-common</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>com.azure</groupId>
+ <artifactId>azure-identity</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.parquet</groupId>
+ <artifactId>parquet-avro</artifactId>
+ <version>${parquet-avro.version}</version>
+ <scope>${storage.parquet.scope}</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop.version}</version>
+ <scope>${storage.parquet.scope}</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>log4j</groupId>
+ <artifactId>log4j</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ </exclusion>
+ <!-- for hadoop 3.3.3 -->
+ <exclusion>
+ <groupId>ch.qos.reload4j</groupId>
+ <artifactId>reload4j</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-reload4j</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.orc</groupId>
+ <artifactId>orc-core</artifactId>
+ <version>${orc-core.version}</version>
+ <classifier>nohive</classifier>
+ <scope>${storage.orc.scope}</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-storage-api</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+ </dependencies>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>net.alchim31.maven</groupId>
+ <artifactId>scala-maven-plugin</artifactId>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
diff --git
a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildAzureBlobFileSystem.java
b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildAzureBlobFileSystem.java
new file mode 100644
index 0000000000..8da6541882
--- /dev/null
+++
b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/factory/impl/BuildAzureBlobFileSystem.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.storage.factory.impl;
+
+import org.apache.linkis.common.io.Fs;
+import org.apache.linkis.storage.factory.BuildFactory;
+import org.apache.linkis.storage.fs.impl.AzureBlobFileSystem;
+import org.apache.linkis.storage.utils.StorageUtils;
+
+import java.io.IOException;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class BuildAzureBlobFileSystem implements BuildFactory {
+ private static final Logger LOG =
LoggerFactory.getLogger(BuildAzureBlobFileSystem.class);
+
+ @Override
+ public Fs getFs(String user, String proxyUser) {
+ AzureBlobFileSystem fs = new AzureBlobFileSystem();
+ try {
+ fs.init(null);
+ } catch (IOException e) {
+ LOG.warn("get file system failed", e);
+ }
+ fs.setUser(user);
+ return fs;
+ }
+
+ @Override
+ public Fs getFs(String user, String proxyUser, String label) {
+ AzureBlobFileSystem fs = new AzureBlobFileSystem();
+ try {
+ fs.init(null);
+ } catch (IOException e) {
+ LOG.warn("get file system failed", e);
+ }
+ fs.setUser(user);
+ return fs;
+ }
+
+ @Override
+ public String fsName() {
+ return StorageUtils.BLOB();
+ }
+}
diff --git
a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/AzureBlobFileSystem.java
b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/AzureBlobFileSystem.java
new file mode 100644
index 0000000000..35473a535f
--- /dev/null
+++
b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/AzureBlobFileSystem.java
@@ -0,0 +1,427 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.storage.fs.impl;
+
+import org.apache.linkis.common.io.FsPath;
+import org.apache.linkis.storage.exception.StorageWarnException;
+import org.apache.linkis.storage.fs.FileSystem;
+import org.apache.linkis.storage.utils.StorageConfiguration;
+import org.apache.linkis.storage.utils.StorageUtils;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.time.Duration;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import com.azure.core.util.polling.SyncPoller;
+import com.azure.storage.blob.BlobClient;
+import com.azure.storage.blob.BlobContainerClient;
+import com.azure.storage.blob.BlobServiceClient;
+import com.azure.storage.blob.BlobServiceClientBuilder;
+import com.azure.storage.blob.models.BlobCopyInfo;
+import com.azure.storage.blob.models.BlobStorageException;
+import com.azure.storage.blob.specialized.BlobOutputStream;
+import com.azure.storage.blob.specialized.BlockBlobClient;
+
+import static
org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.TO_BE_UNKNOW;
+
+public class AzureBlobFileSystem extends FileSystem {
+
+ private static final String SLASH = "/";
+
+ public static class PahtInfo {
+ private String schema = "http://"; // http
+ private String domain; //
+ private String container; // container name
+ private String blobName; // blob name
+ private String tail;
+
+ public PahtInfo(String domain, String container, String blobName) {
+ this.domain = domain;
+ this.container = container;
+ this.blobName = blobName;
+ if (blobName != null) {
+ String[] names = blobName.split(SLASH, -1);
+ tail = names[names.length - 1];
+ }
+ }
+
+ public String toFullName() {
+ return schema + domain + SLASH + container + SLASH + blobName;
+ }
+
+ public String getSchema() {
+ return schema;
+ }
+
+ public String getDomain() {
+ return domain;
+ }
+
+ public String getContainer() {
+ return container;
+ }
+
+ public String getBlobName() {
+ return blobName;
+ }
+
+ public String getTail() {
+ return tail;
+ }
+
+ @Override
+ public String toString() {
+ return "PahtInfo{"
+ + "schema='"
+ + schema
+ + '\''
+ + ", domain='"
+ + domain
+ + '\''
+ + ", container='"
+ + container
+ + '\''
+ + ", blobName='"
+ + blobName
+ + '\''
+ + ", tail='"
+ + tail
+ + '\''
+ + '}';
+ }
+ }
+
+ /** manipulate Azure storage resources and Blob container
管理命名空间下的存储资源和Blob容器 */
+ private BlobServiceClient serviceClient;
+
+ /**
+ * getBlobContainerClient
+ *
+ * @param containerName
+ * @return client which can manipulate Azure Storage containers and their
blobs.<br>
+ * 操作一个容器和其blobs的客户端
+ */
+ private BlobContainerClient getBlobContainerClient(String containerName) {
+ return serviceClient.getBlobContainerClient(containerName);
+ }
+
+ private PahtInfo azureLocation(String path) {
+ return this.azureLocation(new FsPath(path));
+ }
+
+ /**
+ * @param dest
+ * @return domain name,container name,blob name
+ */
+ private PahtInfo azureLocation(FsPath dest) {
+ // https://myaccount.blob.core.windows.net/mycontainer/dir/blobname
+ // returns myaccount.blob.core.windows.net/mycontainer/dir/blobname
+ String path = dest.getPath();
+ // myaccount.blob.core.windows.net/mycontainer/dir/blobname
+ // will split to myaccount.blob.core.windows.net
+ // and mycontainer/dir/blobname
+ String[] paths = path.split(SLASH, 2);
+ if (paths.length < 2) {
+ throw new IllegalArgumentException("file path error,with out container:"
+ path);
+ }
+ // split to container and blob object,
+ // container/dir/blobname will split to container and dir/blobname
+ String[] names = paths[1].split(SLASH, 2);
+ if (names.length < 2) {
+ return new PahtInfo(paths[0], names[0], null);
+ } else {
+ return new PahtInfo(paths[0], names[0], names[1]);
+ }
+ }
+
+ /**
+ * init serviceClient
+ *
+ * @param properties
+ * @throws IOException
+ */
+ @Override
+ public void init(Map<String, String> properties) throws IOException {
+
+ /**
+ * The storage account provides the top-level namespace for the Blob
service. 每个账户提供了一个顶级的命名空间
+ */
+ String acctName =
StorageConfiguration.AZURE_ACCT_NAME().getValue(properties);
+ String connectStr =
StorageConfiguration.AZURE_ACCT_CONNECT_STR().getValue(properties);
+ // Azure SDK client builders accept the credential as a parameter
+ serviceClient =
+ new BlobServiceClientBuilder()
+ .endpoint(StorageUtils.BLOB_SCHEMA() + acctName +
".blob.core.windows.net/")
+ .connectionString(connectStr)
+ .buildClient();
+ }
+
+ /**
+ * name of the fileSystem
+ *
+ * @return
+ */
+ @Override
+ public String fsName() {
+ return StorageUtils.BLOB();
+ }
+
+ @Override
+ public String rootUserName() {
+ return "";
+ }
+
+ /**
+ * @param dest
+ * @return
+ * @throws IOException
+ */
+ @Override
+ public FsPath get(String dest) throws IOException {
+ FsPath path = new FsPath(dest);
+ if (exists(path)) {
+ return path;
+ } else {
+ throw new StorageWarnException(
+ TO_BE_UNKNOW.getErrorCode(),
+ "File or folder does not exist or file name is
garbled(文件或者文件夹不存在或者文件名乱码)");
+ }
+ }
+
+ /**
+ * Opens a blob input stream to download the blob.
+ *
+ * @param dest
+ * @return
+ * @throws BlobStorageException – If a storage service error occurred.
+ */
+ @Override
+ public InputStream read(FsPath dest) {
+ PahtInfo result = azureLocation(dest);
+ BlobClient blobclient =
+
getBlobContainerClient(result.getContainer()).getBlobClient(result.getBlobName());
+ return blobclient.openInputStream();
+ }
+
+ /**
+ * @param dest
+ * @param overwrite
+ * @return
+ * @throws BlobStorageException – If a storage service error occurred.
+ * @see BlockBlobClient #getBlobOutputStream
+ */
+ @Override
+ public OutputStream write(FsPath dest, boolean overwrite) {
+
+ PahtInfo result = azureLocation(dest);
+ BlobClient blobclient =
+
getBlobContainerClient(result.getContainer()).getBlobClient(result.getBlobName());
+ return blobclient.getBlockBlobClient().getBlobOutputStream(overwrite);
+ }
+
+ /**
+ * create a blob<br>
+ * 创建一个对象("文件")
+ *
+ * @param dest
+ * @return
+ * @throws IOException
+ */
+ @Override
+ public boolean create(String dest) throws IOException {
+ FsPath path = new FsPath(dest);
+ if (exists(path)) {
+ return false;
+ }
+ PahtInfo names = this.azureLocation(dest);
+ // TODO: if the destination is a directory-style path, append a placeholder file under it.
+ if (!names.getTail().contains(".")) {
+ String tmp = names.toFullName() + SLASH + "_tmp.txt";
+ names = this.azureLocation(tmp);
+ }
+ BlobContainerClient client =
serviceClient.createBlobContainerIfNotExists(names.getContainer());
+ try (BlobOutputStream bos =
+
client.getBlobClient(names.getBlobName()).getBlockBlobClient().getBlobOutputStream())
{
+ bos.write(1);
+ bos.flush();
+ }
+
+ return true;
+ }
+
+ /**
+ * Flat listing of non-deleted blobs, up to 5000 results at a time.<br>
+ * TODO: consider a paging or iterator interface.
+ *
+ * @param path
+ * @return
+ * @throws IOException
+ */
+ @Override
+ public List<FsPath> list(FsPath path) throws IOException {
+ final PahtInfo result = azureLocation(path);
+ return getBlobContainerClient(result.getContainer()).listBlobs().stream()
+ // Azure does not return deleted objects
+ .filter(item -> !item.isDeleted())
+ .map(
+ item -> {
+ FsPath tmp = new FsPath(result.toFullName() + SLASH +
item.getName());
+ // TODO: empirically, contentType distinguishes "objects" from "paths", but the docs do not state this explicitly
+ if (item.getProperties().getContentType() == null) {
+ tmp.setIsdir(true);
+ }
+ return tmp;
+ })
+ .collect(Collectors.toList());
+ }
+
+ @Override
+ public boolean canRead(FsPath dest) throws IOException {
+ if (this.exists(dest)) {
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ @Override
+ public boolean canRead(FsPath dest, String user) throws IOException {
+ return false;
+ }
+
+ @Override
+ public boolean canWrite(FsPath dest) throws IOException {
+ if (this.exists(dest)) {
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ @Override
+ public boolean exists(FsPath dest) throws IOException {
+ PahtInfo file = this.azureLocation(dest);
+ return
getBlobContainerClient(file.getContainer()).getBlobClient(file.getBlobName()).exists();
+ }
+
+ @Override
+ public boolean delete(FsPath dest) throws IOException {
+ PahtInfo file = this.azureLocation(dest);
+ return getBlobContainerClient(file.getContainer())
+ .getBlobClient(file.getBlobName())
+ .deleteIfExists();
+ }
+
+ @Override
+ public boolean copy(String origin, String dest) throws IOException {
+ PahtInfo oriNames = this.azureLocation(origin);
+ PahtInfo destNames = this.azureLocation(dest);
+
+ BlobClient oriClient =
+
getBlobContainerClient(oriNames.getContainer()).getBlobClient(oriNames.getBlobName());
+ BlockBlobClient destClient =
+ getBlobContainerClient(destNames.getContainer())
+ .getBlobClient(destNames.getBlobName())
+ .getBlockBlobClient();
+ SyncPoller<BlobCopyInfo, Void> poller =
+ destClient.beginCopy(oriClient.getBlobUrl(), Duration.ofSeconds(2));
+ poller.waitForCompletion();
+ return true;
+ }
+
+ @Override
+ public boolean renameTo(FsPath oldDest, FsPath newDest) throws IOException {
+ // no transactional guarantee (copy, then delete)
+ this.copy(oldDest.getPath(), newDest.getPath());
+ this.delete(oldDest);
+ return true;
+ }
+
+ @Override
+ public boolean mkdir(FsPath dest) throws IOException {
+ return this.create(dest.getPath());
+ }
+
+ @Override
+ public boolean mkdirs(FsPath dest) throws IOException {
+ return this.mkdir(dest);
+ }
+
+ // the following methods are probably not supportable for Azure Blob storage
+ @Override
+ public String listRoot() throws IOException {
+ return "";
+ }
+
+ @Override
+ public long getTotalSpace(FsPath dest) throws IOException {
+ return 0;
+ }
+
+ @Override
+ public long getFreeSpace(FsPath dest) throws IOException {
+ return 0;
+ }
+
+ @Override
+ public long getUsableSpace(FsPath dest) throws IOException {
+ return 0;
+ }
+
+ @Override
+ public long getLength(FsPath dest) throws IOException {
+ return 0;
+ }
+
+ @Override
+ public String checkSum(FsPath dest) throws IOException {
+ return null;
+ }
+
+ @Override
+ public boolean canExecute(FsPath dest) throws IOException {
+ return false;
+ }
+
+ @Override
+ public boolean setOwner(FsPath dest, String user, String group) throws
IOException {
+ return false;
+ }
+
+ @Override
+ public boolean setOwner(FsPath dest, String user) throws IOException {
+ return false;
+ }
+
+ @Override
+ public boolean setGroup(FsPath dest, String group) throws IOException {
+ return false;
+ }
+
+ @Override
+ public boolean setPermission(FsPath dest, String permission) throws
IOException {
+ return false;
+ }
+
+ @Override
+ public void close() throws IOException {}
+}
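A minimal usage sketch (not part of the commit) of how the new file system can be
obtained through its factory. The account and container URL below are hypothetical;
credentials are resolved from the StorageConfiguration keys added later in this diff:

    import scala.collection.JavaConverters._

    import org.apache.commons.io.IOUtils
    import org.apache.linkis.common.io.FsPath
    import org.apache.linkis.storage.factory.impl.BuildAzureBlobFileSystem
    import org.apache.linkis.storage.fs.impl.AzureBlobFileSystem

    object AzureBlobExample extends App {
      // getFs() calls init(null), which builds the BlobServiceClient from
      // linkis.storage.azure.acctName / linkis.storage.azure.connectstr.
      val fs = new BuildAzureBlobFileSystem()
        .getFs("hadoop", null)
        .asInstanceOf[AzureBlobFileSystem]

      // Paths are full blob URLs: https://<account>.blob.core.windows.net/<container>/<blob>
      val blob = new FsPath("https://myaccount.blob.core.windows.net/mycontainer/logs/2025-10-17.log")
      if (fs.exists(blob)) {
        val in = fs.read(blob)
        try println(IOUtils.toString(in, "UTF-8"))
        finally in.close()
      }

      // Flat listing of the container the path points at (up to 5000 blobs).
      fs.list(blob).asScala.foreach(p => println(p.getPath))
      fs.close()
    }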
diff --git
a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageConfiguration.scala
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageConfiguration.scala
index c73b00743d..17345c050a 100644
---
a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageConfiguration.scala
+++
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageConfiguration.scala
@@ -50,7 +50,8 @@ object StorageConfiguration {
val STORAGE_BUILD_FS_CLASSES = CommonVars(
"wds.linkis.storage.build.fs.classes",
"org.apache.linkis.storage.factory.impl.BuildHDFSFileSystem,org.apache.linkis.storage.factory.impl.BuildLocalFileSystem,"
+
-
"org.apache.linkis.storage.factory.impl.BuildOSSSystem,org.apache.linkis.storage.factory.impl.BuildS3FileSystem"
+
"org.apache.linkis.storage.factory.impl.BuildOSSSystem,org.apache.linkis.storage.factory.impl.BuildS3FileSystem,"
+
+ "org.apache.linkis.storage.factory.impl.BuildAzureBlobFileSystem"
)
val IS_SHARE_NODE = CommonVars("wds.linkis.storage.is.share.node", true)
@@ -117,4 +118,7 @@ object StorageConfiguration {
val S3_BUCKET = CommonVars[String]("linkis.storage.s3.bucket", "", null,
null)
+ val AZURE_ACCT_NAME = CommonVars[String]("linkis.storage.azure.acctName",
"", null, null)
+
+ val AZURE_ACCT_CONNECT_STR =
CommonVars[String]("linkis.storage.azure.connectstr", "", null, null)
}
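With the Azure factory registered above, a deployment only needs the two new account
settings. A hedged illustration of how they are read; the property values shown in
the comments are placeholders, not defaults from the commit:

    import org.apache.linkis.storage.utils.StorageConfiguration

    object AzureStorageConfSketch extends App {
      // Expected in linkis.properties, e.g. (placeholder values):
      //   linkis.storage.azure.acctName=myaccount
      //   linkis.storage.azure.connectstr=DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=...
      val acctName = StorageConfiguration.AZURE_ACCT_NAME.getValue
      val connectStr = StorageConfiguration.AZURE_ACCT_CONNECT_STR.getValue
      println(s"Azure account configured: ${acctName.nonEmpty && connectStr.nonEmpty}")
    }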
diff --git
a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala
index dd5d8c37ef..a38b0edc4c 100644
---
a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala
+++
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala
@@ -39,11 +39,13 @@ object StorageUtils extends Logging {
val FILE = "file"
val OSS = "oss"
val S3 = "s3"
+ val BLOB = "https"
val FILE_SCHEMA = "file://"
val HDFS_SCHEMA = "hdfs://"
val OSS_SCHEMA = "oss://"
val S3_SCHEMA = "s3://"
+ val BLOB_SCHEMA = "https://"
private val nf = NumberFormat.getInstance()
nf.setGroupingUsed(false)
@@ -202,7 +204,7 @@ object StorageUtils extends Logging {
* @return
*/
def getFsPath(path: String): FsPath = {
- if (path.startsWith(FILE_SCHEMA) || path.startsWith(HDFS_SCHEMA)) new
FsPath(path)
+ if (path.startsWith(FILE_SCHEMA) || path.startsWith(HDFS_SCHEMA) ||
path.startsWith(BLOB_SCHEMA)) new FsPath(path)
else {
new FsPath(FILE_SCHEMA + path)
}
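The effect of the getFsPath change is that full blob URLs are now returned as-is
instead of being prefixed with file://. A small illustration (URLs are hypothetical):

    import org.apache.linkis.storage.utils.StorageUtils

    object GetFsPathSketch extends App {
      // Recognized schema: kept as-is (same handling as file:// and hdfs://).
      val blobPath = StorageUtils.getFsPath("https://myaccount.blob.core.windows.net/mycontainer/a.txt")
      // Bare path: still treated as a local file, i.e. file:// is prepended.
      val localPath = StorageUtils.getFsPath("/tmp/a.txt")
    }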
diff --git
a/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageConfigurationTest.scala
b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageConfigurationTest.scala
index 4d21655ebd..e5adef9124 100644
---
a/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageConfigurationTest.scala
+++
b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageConfigurationTest.scala
@@ -1,78 +1,84 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.storage.utils
-
-import org.junit.jupiter.api.{Assertions, DisplayName, Test}
-
-class StorageConfigurationTest {
-
- @Test
- @DisplayName("constTest")
- def constTest(): Unit = {
-
- val storagerootuser = StorageConfiguration.STORAGE_ROOT_USER.getValue
- val hdfsrootuser = StorageConfiguration.HDFS_ROOT_USER.getValue
- val localrootuser = StorageConfiguration.LOCAL_ROOT_USER.getValue
- val storageusergroup = StorageConfiguration.STORAGE_USER_GROUP.getValue
- val storagersfiletype = StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue
- val storagersfilesuffix =
StorageConfiguration.STORAGE_RS_FILE_SUFFIX.getValue
- val types = StorageConfiguration.ResultTypes
- val storageresultsetpackage =
StorageConfiguration.STORAGE_RESULT_SET_PACKAGE.getValue
- val storageresultsetclasses =
StorageConfiguration.STORAGE_RESULT_SET_CLASSES.getValue
- val storagebuildfsclasses =
StorageConfiguration.STORAGE_BUILD_FS_CLASSES.getValue
- val issharenode = StorageConfiguration.IS_SHARE_NODE.getValue
- val enableioproxy = StorageConfiguration.ENABLE_IO_PROXY.getValue
- val ioUser = StorageConfiguration.IO_USER.getValue
- val iofsexpiretime = StorageConfiguration.IO_FS_EXPIRE_TIME.getValue
- val iodefaultcreator = StorageConfiguration.IO_DEFAULT_CREATOR.getValue
- val iofsreinit = StorageConfiguration.IO_FS_RE_INIT.getValue
- val ioinitretrylimit = StorageConfiguration.IO_INIT_RETRY_LIMIT.getValue
- val storagehdfsgroup = StorageConfiguration.STORAGE_HDFS_GROUP.getValue
- val doublefractionlen = StorageConfiguration.DOUBLE_FRACTION_LEN.getValue
- val hdfspathprefixcheckon =
StorageConfiguration.HDFS_PATH_PREFIX_CHECK_ON.getValue
- val hdfspathprefixremove =
StorageConfiguration.HDFS_PATH_PREFIX_REMOVE.getValue
- val fschecksumdisbale = StorageConfiguration.FS_CHECKSUM_DISBALE
-
- Assertions.assertEquals("hadoop", storagerootuser)
- Assertions.assertEquals("hadoop", hdfsrootuser)
- Assertions.assertEquals("root", localrootuser)
- Assertions.assertEquals("bdap", storageusergroup)
- Assertions.assertEquals("utf-8", storagersfiletype)
- Assertions.assertEquals(".dolphin", storagersfilesuffix)
- Assertions.assertTrue(types.size > 0)
- Assertions.assertEquals("org.apache.linkis.storage.resultset",
storageresultsetpackage)
- Assertions.assertEquals(
-
"txt.TextResultSet,table.TableResultSet,io.IOResultSet,html.HtmlResultSet,picture.PictureResultSet",
- storageresultsetclasses
- )
- Assertions.assertTrue(issharenode)
- Assertions.assertFalse(enableioproxy)
- Assertions.assertEquals("root", ioUser)
- Assertions.assertTrue(600000 == iofsexpiretime)
- Assertions.assertEquals("IDE", iodefaultcreator)
- Assertions.assertEquals("re-init", iofsreinit)
- Assertions.assertTrue(10 == ioinitretrylimit)
- Assertions.assertEquals("hadoop", storagehdfsgroup)
- Assertions.assertTrue(30 == doublefractionlen)
- Assertions.assertTrue(hdfspathprefixcheckon)
- Assertions.assertTrue(hdfspathprefixremove)
- Assertions.assertFalse(fschecksumdisbale)
-
- }
-
-}
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.storage.utils
+
+import org.junit.jupiter.api.{Assertions, DisplayName, Test}
+
+class StorageConfigurationTest {
+
+ @Test
+ @DisplayName("constTest")
+ def constTest(): Unit = {
+
+ val storagerootuser = StorageConfiguration.STORAGE_ROOT_USER.getValue
+ val hdfsrootuser = StorageConfiguration.HDFS_ROOT_USER.getValue
+ val localrootuser = StorageConfiguration.LOCAL_ROOT_USER.getValue
+ val storageusergroup = StorageConfiguration.STORAGE_USER_GROUP.getValue
+ val storagersfiletype = StorageConfiguration.STORAGE_RS_FILE_TYPE.getValue
+ val storagersfilesuffix =
StorageConfiguration.STORAGE_RS_FILE_SUFFIX.getValue
+ val types = StorageConfiguration.ResultTypes
+ val storageresultsetpackage =
StorageConfiguration.STORAGE_RESULT_SET_PACKAGE.getValue
+ val storageresultsetclasses =
StorageConfiguration.STORAGE_RESULT_SET_CLASSES.getValue
+ val storagebuildfsclasses =
StorageConfiguration.STORAGE_BUILD_FS_CLASSES.getValue
+ val issharenode = StorageConfiguration.IS_SHARE_NODE.getValue
+ val enableioproxy = StorageConfiguration.ENABLE_IO_PROXY.getValue
+ val ioUser = StorageConfiguration.IO_USER.getValue
+ val iofsexpiretime = StorageConfiguration.IO_FS_EXPIRE_TIME.getValue
+ val iodefaultcreator = StorageConfiguration.IO_DEFAULT_CREATOR.getValue
+ val iofsreinit = StorageConfiguration.IO_FS_RE_INIT.getValue
+ val ioinitretrylimit = StorageConfiguration.IO_INIT_RETRY_LIMIT.getValue
+ val storagehdfsgroup = StorageConfiguration.STORAGE_HDFS_GROUP.getValue
+ val doublefractionlen = StorageConfiguration.DOUBLE_FRACTION_LEN.getValue
+ val hdfspathprefixcheckon =
StorageConfiguration.HDFS_PATH_PREFIX_CHECK_ON.getValue
+ val hdfspathprefixremove =
StorageConfiguration.HDFS_PATH_PREFIX_REMOVE.getValue
+ val fschecksumdisbale = StorageConfiguration.FS_CHECKSUM_DISBALE
+
+ Assertions.assertEquals("hadoop", storagerootuser)
+ Assertions.assertEquals("hadoop", hdfsrootuser)
+ Assertions.assertEquals("root", localrootuser)
+ Assertions.assertEquals("bdap", storageusergroup)
+ Assertions.assertEquals("utf-8", storagersfiletype)
+ Assertions.assertEquals(".dolphin", storagersfilesuffix)
+ Assertions.assertTrue(types.size > 0)
+ Assertions.assertEquals("org.apache.linkis.storage.resultset",
storageresultsetpackage)
+ Assertions.assertEquals(
+
"txt.TextResultSet,table.TableResultSet,io.IOResultSet,html.HtmlResultSet,picture.PictureResultSet",
+ storageresultsetclasses
+ )
+ Assertions.assertEquals(
+
"org.apache.linkis.storage.factory.impl.BuildHDFSFileSystem,org.apache.linkis.storage.factory.impl.BuildLocalFileSystem,"
+
+
"org.apache.linkis.storage.factory.impl.BuildOSSSystem,org.apache.linkis.storage.factory.impl.BuildS3FileSystem,"
+
+ "org.apache.linkis.storage.factory.impl.BuildAzureBlobFileSystem",
+ storagebuildfsclasses
+ )
+ Assertions.assertTrue(issharenode)
+ Assertions.assertFalse(enableioproxy)
+ Assertions.assertEquals("root", ioUser)
+ Assertions.assertTrue(600000 == iofsexpiretime)
+ Assertions.assertEquals("IDE", iodefaultcreator)
+ Assertions.assertEquals("re-init", iofsreinit)
+ Assertions.assertTrue(10 == ioinitretrylimit)
+ Assertions.assertEquals("hadoop", storagehdfsgroup)
+ Assertions.assertTrue(30 == doublefractionlen)
+ Assertions.assertTrue(hdfspathprefixcheckon)
+ Assertions.assertTrue(hdfspathprefixremove)
+ Assertions.assertFalse(fschecksumdisbale)
+
+ }
+
+}
diff --git
a/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/conf/linkis-cli.properties
b/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/conf/linkis-cli.properties
index a792c9ef69..699b1d4093 100644
---
a/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/conf/linkis-cli.properties
+++
b/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/conf/linkis-cli.properties
@@ -17,7 +17,7 @@ wds.linkis.client.common.creator=LINKISCLI
wds.linkis.client.common.gatewayUrl=http://127.0.0.1:9001
wds.linkis.client.common.authStrategy=token
wds.linkis.client.common.tokenKey=Validation-Code
-wds.linkis.client.common.tokenValue=LINKIS-AUTH
+wds.linkis.client.common.tokenValue=LINKIS-UNAVAILABLE-TOKEN
wds.linkis.client.noncustomizable.enable.user.specification=true
#wds.linkis.client.noncustomizable.enable.proxy.user=true
#wds.linkis.client.common.submitUser
diff --git
a/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/linkis-cli.properties
b/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/linkis-cli.properties
index 8d20858645..7cd0d129ea 100644
---
a/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/linkis-cli.properties
+++
b/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/linkis-cli.properties
@@ -17,7 +17,7 @@
wds.linkis.client.common.gatewayUrl=http://127.0.0.1:9001
wds.linkis.client.common.authStrategy=token
wds.linkis.client.common.tokenKey=Validation-Code
-wds.linkis.client.common.tokenValue=LINKIS-AUTH
+wds.linkis.client.common.tokenValue=LINKIS-UNAVAILABLE-TOKEN
#
#wds.linkis.client.common.submitUser
#wds.linkis.client.common.submitPassword
diff --git
a/linkis-computation-governance/linkis-client/linkis-computation-client/src/test/java/org/apache/linkis/ujes/client/JobObserveActionTest.scala
b/linkis-computation-governance/linkis-client/linkis-computation-client/src/test/java/org/apache/linkis/ujes/client/JobObserveActionTest.scala
index 1dec59387f..7a4c3bb106 100644
---
a/linkis-computation-governance/linkis-client/linkis-computation-client/src/test/java/org/apache/linkis/ujes/client/JobObserveActionTest.scala
+++
b/linkis-computation-governance/linkis-client/linkis-computation-client/src/test/java/org/apache/linkis/ujes/client/JobObserveActionTest.scala
@@ -18,7 +18,7 @@
package org.apache.linkis.ujes.client
import org.apache.commons.io.IOUtils
-import org.apache.linkis.common.conf.CommonVars
+import org.apache.linkis.common.conf.{CommonVars, Configuration}
import
org.apache.linkis.httpclient.dws.authentication.{StaticAuthenticationStrategy,
TokenAuthenticationStrategy}
import org.apache.linkis.httpclient.dws.config.{DWSClientConfig,
DWSClientConfigBuilder}
import org.apache.linkis.ujes.client.request.{EmsListAction, JobExecuteAction,
JobObserveAction, ResultSetAction}
@@ -29,7 +29,7 @@ import java.util.concurrent.TimeUnit
@Deprecated
object JobObserveActionTest extends App {
- val bmlToken = CommonVars("wds.linkis.bml.auth.token.value",
"LINKIS-AUTH").getValue
+ val bmlToken = CommonVars("wds.linkis.bml.auth.token.value",
Configuration.LINKIS_TOKEN.getValue).getValue
val clientConfig = DWSClientConfigBuilder.newBuilder()
.addServerUrl("127.0.0.1:9001") // Change to test gateway address
diff --git a/linkis-dist/bin/install-linkis-to-kubernetes.sh
b/linkis-dist/bin/install-linkis-to-kubernetes.sh
index 44e84e989a..00681b27b9 100644
--- a/linkis-dist/bin/install-linkis-to-kubernetes.sh
+++ b/linkis-dist/bin/install-linkis-to-kubernetes.sh
@@ -93,7 +93,7 @@ create_kind_cluster(){
}
#mysql installation
install_mysql(){
- ${ROOT_DIR}/helm/scripts/install-mysql.sh
+ ${ROOT_DIR}/helm/scripts/install-mysql.sh $USING_KIND
}
#ldh installation
install_ldh(){
diff --git a/linkis-dist/bin/install.sh b/linkis-dist/bin/install.sh
index 299308f9ce..a103d1bf57 100644
--- a/linkis-dist/bin/install.sh
+++ b/linkis-dist/bin/install.sh
@@ -124,17 +124,34 @@ cp ${LINKIS_DB_CONFIG_PATH} $LINKIS_HOME/conf
common_conf=$LINKIS_HOME/conf/linkis.properties
-RANDOM_BML_TOKEN="LINKIS-`cat /proc/sys/kernel/random/uuid | awk -F- '{print
$1$2$3$4$5}'`"
-RANDOM_WS_TOKEN="WS-`cat /proc/sys/kernel/random/uuid | awk -F- '{print
$1$2$3$4$5}'`"
-RANDOM_DSM_TOKEN="DSM-`cat /proc/sys/kernel/random/uuid | awk -F- '{print
$1$2$3$4$5}'`"
-RANDOM_DSS_TOKEN="DSS-`cat /proc/sys/kernel/random/uuid | awk -F- '{print
$1$2$3$4$5}'`"
-RANDOM_QUALITIS_TOKEN="QUALITIS-`cat /proc/sys/kernel/random/uuid | awk -F-
'{print $1$2$3$4$5}'`"
-RANDOM_VALIDATOR_TOKEN="VALIDATOR-`cat /proc/sys/kernel/random/uuid | awk -F-
'{print $1$2$3$4$5}'`"
-if [ $DEBUG_MODE != "true" ];then
- sed -i ${txt} "s#LINKIS-AUTH#$RANDOM_BML_TOKEN#g"
$LINKIS_HOME/conf/linkis-cli/linkis-cli.properties
- sed -i ${txt} "s#LINKIS-AUTH#$RANDOM_BML_TOKEN#g" $common_conf
- sed -i ${txt} "s#LINKIS-AUTH#$RANDOM_BML_TOKEN#g"
$LINKIS_HOME/admin/configuration_helper.sh
-fi
+echo "======= SECURITY: Generating secure random tokens =========="
+
+# SECURITY: Generate secure random tokens for all services using new secure
placeholders
+LINKIS_GATEWAY_TOKEN="LINKIS-`cat /proc/sys/kernel/random/uuid | awk -F-
'{print $1$2$3$4$5}'`"
+WS_SERVICE_TOKEN="WS-`cat /proc/sys/kernel/random/uuid | awk -F- '{print
$1$2$3$4$5}'`"
+DSM_SERVICE_TOKEN="DSM-`cat /proc/sys/kernel/random/uuid | awk -F- '{print
$1$2$3$4$5}'`"
+DSS_SERVICE_TOKEN="DSS-`cat /proc/sys/kernel/random/uuid | awk -F- '{print
$1$2$3$4$5}'`"
+QUALITIS_SERVICE_TOKEN="QUALITIS-`cat /proc/sys/kernel/random/uuid | awk -F-
'{print $1$2$3$4$5}'`"
+VALIDATOR_SERVICE_TOKEN="VALIDATOR-`cat /proc/sys/kernel/random/uuid | awk -F-
'{print $1$2$3$4$5}'`"
+CLI_SERVICE_TOKEN="CLI-`cat /proc/sys/kernel/random/uuid | awk -F- '{print
$1$2$3$4$5}'`"
+
+# SECURITY: Set secure user and host restrictions (no wildcards)
+echo "Generated secure tokens:"
+echo "- LINKIS Gateway Token: $LINKIS_GATEWAY_TOKEN"
+echo "- WS Service Token: $WS_SERVICE_TOKEN"
+echo "- DSM Service Token: $DSM_SERVICE_TOKEN"
+echo "- DSS Service Token: $DSS_SERVICE_TOKEN"
+echo "- QUALITIS Service Token: $QUALITIS_SERVICE_TOKEN"
+echo "- VALIDATOR Service Token: $VALIDATOR_SERVICE_TOKEN"
+echo "- CLI Service Token: $CLI_SERVICE_TOKEN"
+
+# SECURITY: Replace secure placeholders in all configuration files
+echo "Replacing secure placeholders in configuration files..."
+sed -i ${txt} "s#LINKIS-UNAVAILABLE-TOKEN#$LINKIS_GATEWAY_TOKEN#g"
$LINKIS_HOME/conf/linkis-cli/linkis-cli.properties 2>/dev/null || true
+sed -i ${txt} "s#CLI-UNAVAILABLE-TOKEN#$CLI_SERVICE_TOKEN#g"
$LINKIS_HOME/conf/linkis-cli/linkis-cli.properties 2>/dev/null || true
+sed -i ${txt} "s#LINKIS-UNAVAILABLE-TOKEN#$LINKIS_GATEWAY_TOKEN#g"
$common_conf 2>/dev/null || true
+sed -i ${txt} "s#DSM-UNAVAILABLE-TOKEN#$DSM_SERVICE_TOKEN#g" $common_conf
2>/dev/null || true
+sed -i ${txt} "s#LINKIS-UNAVAILABLE-TOKEN#$LINKIS_GATEWAY_TOKEN#g"
$LINKIS_HOME/admin/configuration_helper.sh 2>/dev/null || true
echo "======= Step 3: Create necessary directory =========="
@@ -219,13 +236,60 @@ dml_file_name=linkis_dml.sql
if [[ 'postgresql' = "$dbType" ]];then
dml_file_name=linkis_dml_pg.sql
fi
-if [ $DEBUG_MODE != "true" ];then
- sed -i ${txt} "s#LINKIS-AUTH#$RANDOM_BML_TOKEN#g"
$LINKIS_HOME/db/${dml_file_name}
- sed -i ${txt} "s#WS-AUTH#$RANDOM_WS_TOKEN#g"
$LINKIS_HOME/db/${dml_file_name}
- sed -i ${txt} "s#DSM-AUTH#$RANDOM_DSM_TOKEN#g"
$LINKIS_HOME/db/${dml_file_name}
- sed -i ${txt} "s#DSS-AUTH#$RANDOM_DSS_TOKEN#g"
$LINKIS_HOME/db/${dml_file_name}
- sed -i ${txt} "s#QUALITIS-AUTH#$RANDOM_QUALITIS_TOKEN#g"
$LINKIS_HOME/db/${dml_file_name}
- sed -i ${txt} "s#VALIDATOR-AUTH#$RANDOM_VALIDATOR_TOKEN#g"
$LINKIS_HOME/db/${dml_file_name}
+echo "======= SECURITY: Replacing database placeholders with secure tokens
=========="
+
+# SECURITY: Replace secure placeholders in database initialization file
+echo "Replacing secure placeholders in database file:
$LINKIS_HOME/db/${dml_file_name}"
+sed -i ${txt} "s#{{LINKIS_GATEWAY_TOKEN}}#$LINKIS_GATEWAY_TOKEN#g"
$LINKIS_HOME/db/${dml_file_name} 2>/dev/null || true
+sed -i ${txt} "s#{{WS_SERVICE_TOKEN}}#$WS_SERVICE_TOKEN#g"
$LINKIS_HOME/db/${dml_file_name} 2>/dev/null || true
+sed -i ${txt} "s#{{DSM_SERVICE_TOKEN}}#$DSM_SERVICE_TOKEN#g"
$LINKIS_HOME/db/${dml_file_name} 2>/dev/null || true
+sed -i ${txt} "s#{{DSS_SERVICE_TOKEN}}#$DSS_SERVICE_TOKEN#g"
$LINKIS_HOME/db/${dml_file_name} 2>/dev/null || true
+sed -i ${txt} "s#{{QUALITIS_SERVICE_TOKEN}}#$QUALITIS_SERVICE_TOKEN#g"
$LINKIS_HOME/db/${dml_file_name} 2>/dev/null || true
+sed -i ${txt} "s#{{VALIDATOR_SERVICE_TOKEN}}#$VALIDATOR_SERVICE_TOKEN#g"
$LINKIS_HOME/db/${dml_file_name} 2>/dev/null || true
+sed -i ${txt} "s#{{CLI_SERVICE_TOKEN}}#$CLI_SERVICE_TOKEN#g"
$LINKIS_HOME/db/${dml_file_name} 2>/dev/null || true
+# Replace old insecure placeholder token with secure gateway token
+sed -i ${txt} "s#LINKIS-UNAVAILABLE-TOKEN#$LINKIS_GATEWAY_TOKEN#g"
$LINKIS_HOME/db/${dml_file_name} 2>/dev/null || true
+
+# SECURITY: Replace user and host placeholders with secure values
+sed -i ${txt} "s#{{LINKIS_GATEWAY_USER}}#$LINKIS_GATEWAY_USER#g"
$LINKIS_HOME/db/${dml_file_name} 2>/dev/null || true
+sed -i ${txt} "s#{{LINKIS_GATEWAY_HOST}}#$LINKIS_GATEWAY_HOST#g"
$LINKIS_HOME/db/${dml_file_name} 2>/dev/null || true
+
+echo "Database placeholder replacement completed."
+
+# SECURITY: Final verification - check for unreplaced placeholders
+remaining_placeholders=$(grep -o "{{[^}]*}}" $LINKIS_HOME/db/${dml_file_name}
2>/dev/null | wc -l)
+if [ $remaining_placeholders -gt 0 ]; then
+ echo "WARNING: Found $remaining_placeholders unreplaced placeholders in
database file!"
+ echo "SECURITY RISK: Please review $LINKIS_HOME/db/${dml_file_name}
manually."
+ grep "{{[^}]*}}" $LINKIS_HOME/db/${dml_file_name} 2>/dev/null || true
+else
+ echo "SUCCESS: All security placeholders in database file have been replaced
with secure tokens."
+fi
+
+# SECURITY: Check for any remaining old insecure tokens
+old_tokens=$(grep -o
"LINKIS-AUTH\|WS-AUTH\|DSS-AUTH\|QUALITIS-AUTH\|VALIDATOR-AUTH\|LINKISCLI-AUTH\|DSM-AUTH\|LINKIS-UNAVAILABLE-TOKEN"
$LINKIS_HOME/db/${dml_file_name} 2>/dev/null | wc -l)
+if [ $old_tokens -gt 0 ]; then
+ echo "CRITICAL: Found $old_tokens old insecure tokens still in database
file!"
+ echo "These should have been replaced with secure placeholders. Please check
the file manually."
+ grep -o
"LINKIS-AUTH\|WS-AUTH\|DSS-AUTH\|QUALITIS-AUTH\|VALIDATOR-AUTH\|LINKISCLI-AUTH\|DSM-AUTH\|LINKIS-UNAVAILABLE-TOKEN"
$LINKIS_HOME/db/${dml_file_name} 2>/dev/null || true
+else
+ echo "SUCCESS: No old insecure tokens found in database file."
+fi
+
+# SECURITY: Final verification - check for unreplaced LINKIS-UNAVAILABLE-TOKEN
in all configuration files
+echo "======= SECURITY: Final verification for remaining insecure tokens
=========="
+remaining_insecure_config=$(grep -r "LINKIS-UNAVAILABLE-TOKEN"
$LINKIS_HOME/conf/ 2>/dev/null | wc -l)
+remaining_insecure_admin=$(grep -o "LINKIS-UNAVAILABLE-TOKEN"
$LINKIS_HOME/admin/configuration_helper.sh 2>/dev/null | wc -l)
+
+if [ $remaining_insecure_config -gt 0 ] || [ $remaining_insecure_admin -gt 0
]; then
+ echo "WARNING: Found remaining LINKIS-UNAVAILABLE-TOKEN in configuration
files!"
+ echo "Configuration files: $remaining_insecure_config occurrences"
+ echo "Admin scripts: $remaining_insecure_admin occurrences"
+ echo "SECURITY RISK: Please review these files manually:"
+ grep -r "LINKIS-UNAVAILABLE-TOKEN" $LINKIS_HOME/conf/ 2>/dev/null || true
+ grep -n "LINKIS-UNAVAILABLE-TOKEN"
$LINKIS_HOME/admin/configuration_helper.sh 2>/dev/null || true
+else
+ echo "SUCCESS: All LINKIS-UNAVAILABLE-TOKEN placeholders have been replaced
with secure tokens."
fi
diff --git a/linkis-dist/deploy-config/linkis-env.sh
b/linkis-dist/deploy-config/linkis-env.sh
index cbae216437..f5eed8b7d3 100644
--- a/linkis-dist/deploy-config/linkis-env.sh
+++ b/linkis-dist/deploy-config/linkis-env.sh
@@ -167,7 +167,7 @@ export SERVER_HEAP_SIZE="512M"
##The extended lib such mysql-connector-java-*.jar
#LINKIS_EXTENDED_LIB=/appcom/common/linkisExtendedLib
-LINKIS_VERSION=1.7.0
+LINKIS_VERSION=1.8.0
# for install
LINKIS_PUBLIC_MODULE=lib/linkis-commons/public-module
diff --git a/linkis-dist/docker/ldh.Dockerfile
b/linkis-dist/docker/ldh.Dockerfile
index 0e6e02d1e1..8a1d64abce 100644
--- a/linkis-dist/docker/ldh.Dockerfile
+++ b/linkis-dist/docker/ldh.Dockerfile
@@ -34,7 +34,7 @@ ARG SPARK_HADOOP_VERSION=3.2
ARG FLINK_VERSION=1.12.2
ARG ZOOKEEPER_VERSION=3.5.9
-ARG LINKIS_VERSION=1.7.0
+ARG LINKIS_VERSION=1.8.0
RUN useradd -r -s /bin/bash -u 100001 -g root -G wheel hadoop
diff --git a/linkis-dist/docker/linkis.Dockerfile
b/linkis-dist/docker/linkis.Dockerfile
index 21a8e192ac..2fd4df7d91 100644
--- a/linkis-dist/docker/linkis.Dockerfile
+++ b/linkis-dist/docker/linkis.Dockerfile
@@ -56,7 +56,7 @@ ENV TZ="Asia/Shanghai"
######################################################################
FROM linkis-base as linkis
-ARG LINKIS_VERSION=1.7.0
+ARG LINKIS_VERSION=1.8.0
ARG LINKIS_SYSTEM_USER="hadoop"
ARG LINKIS_SYSTEM_UID="9001"
@@ -106,7 +106,7 @@ ENTRYPOINT ["/bin/bash"]
######################################################################
FROM ${IMAGE_BASE_WEB} as linkis-web
-ARG LINKIS_VERSION=1.7.0
+ARG LINKIS_VERSION=1.8.0
ARG LINKIS_HOME=/opt/linkis
ENV LINKIS_WEB_ROOT ${LINKIS_HOME}-web
diff --git a/linkis-dist/docker/scripts/make-linikis-image-with-mysql-jdbc.sh
b/linkis-dist/docker/scripts/make-linkis-image-with-mysql-jdbc.sh
similarity index 100%
rename from linkis-dist/docker/scripts/make-linikis-image-with-mysql-jdbc.sh
rename to linkis-dist/docker/scripts/make-linkis-image-with-mysql-jdbc.sh
diff --git a/linkis-dist/docker/scripts/utils.sh
b/linkis-dist/docker/scripts/utils.sh
index f7813cfe70..8c8c181de0 100755
--- a/linkis-dist/docker/scripts/utils.sh
+++ b/linkis-dist/docker/scripts/utils.sh
@@ -20,6 +20,8 @@ download() {
TAR_FILE=$2
HARD_LINK_ROOT=$3
+ mkdir -p ${TAR_CACHE_ROOT}
+
if [ ! -f ${TAR_CACHE_ROOT}/${TAR_FILE} ]; then
echo "- downloading ${TAR_FILE} to ${TAR_CACHE_ROOT} from ${TAR_URL}"
curl -L ${TAR_URL} -o ${TAR_CACHE_ROOT}/${TAR_FILE}
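Note on the utils.sh change above: `mkdir -p ${TAR_CACHE_ROOT}` guarantees the tarball cache directory exists before the first cache lookup, which is what the cache-directory fix in this release was about. For illustration only, a rough Scala sketch of the same cache-then-download idea (the helper name, the hard-link step inferred from HARD_LINK_ROOT, and the use of java.nio/java.net are assumptions, not part of this patch):

    import java.net.URL
    import java.nio.file.{Files, Paths, StandardCopyOption}

    object CacheDownload {
      // Sketch of the shell download() helper: ensure the cache dir exists,
      // download only on a cache miss, then hard-link the tarball into place.
      def download(tarUrl: String, tarFile: String, cacheRoot: String, hardLinkRoot: String): Unit = {
        val cacheDir = Files.createDirectories(Paths.get(cacheRoot)) // mirrors `mkdir -p ${TAR_CACHE_ROOT}`
        val cached   = cacheDir.resolve(tarFile)
        if (!Files.exists(cached)) {
          val in = new URL(tarUrl).openStream()
          try Files.copy(in, cached, StandardCopyOption.REPLACE_EXISTING)
          finally in.close()
        }
        val target = Paths.get(hardLinkRoot).resolve(tarFile)
        if (!Files.exists(target)) Files.createLink(target, cached) // assumed hard-link step
      }
    }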
diff --git a/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml
b/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml
index 634c089220..27fa0b7afe 100644
--- a/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml
+++ b/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml
@@ -1426,9 +1426,9 @@ data:
INSERT INTO linkis_ps_error_code
(error_code,error_desc,error_regex,error_type) VALUES
('91007','JobServer中不存在您的脚本文件,请将你的脚本文件放入对应的JobServer路径中', 'Could not open input
file for reading%does not exist',0);
-- ----------------------------
- -- Default Tokens
+ -- Default Tokens
-- ----------------------------
- REPLACE INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES ('LINKIS-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
+ REPLACE INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES
('LINKIS-UNAVAILABLE-TOKEN','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
INSERT INTO `linkis_ps_dm_datasource_type` (`name`, `description`,
`option`, `classifier`, `icon`, `layers`) VALUES ('mysql', 'mysql数据库',
'mysql数据库', '关系型数据库', '', 3);
INSERT INTO `linkis_ps_dm_datasource_type` (`name`, `description`,
`option`, `classifier`, `icon`, `layers`) VALUES ('kafka', 'kafka', 'kafka',
'消息队列', '', 2);
diff --git
a/linkis-dist/helm/charts/linkis/templates/configmap-linkis-config.yaml
b/linkis-dist/helm/charts/linkis/templates/configmap-linkis-config.yaml
index 5c12b9f791..e7042d0089 100644
--- a/linkis-dist/helm/charts/linkis/templates/configmap-linkis-config.yaml
+++ b/linkis-dist/helm/charts/linkis/templates/configmap-linkis-config.yaml
@@ -69,7 +69,7 @@ data:
## Token-Code=Token-User
## eg:TEST-AUTH=hadoop,root,user01
### http request with header { Token-Code:TEST-AUTH,Token-User:user01 }
- LINKIS-AUTH=*
+ LINKIS-UNAVAILABLE-TOKEN=*
application-linkis.yml: |
debug: {{ .Values.linkis.featureGates.testMode }}
@@ -212,7 +212,7 @@ data:
wds.linkis.client.common.gatewayUrl={{- include "linkis.gateway.url" . }}
wds.linkis.client.common.authStrategy=token
wds.linkis.client.common.tokenKey=Validation-Code
- wds.linkis.client.common.tokenValue=LINKIS-AUTH
+ wds.linkis.client.common.tokenValue=LINKIS-UNAVAILABLE-TOKEN
spring.spring.mvc.pathmatch.matching-strategy=ant_path_matcher
spring.spring.cloud.loadbalancer.cache.enabled=false
springfox.documentation.enabled=false
diff --git a/linkis-dist/package/admin/configuration_helper.sh
b/linkis-dist/package/admin/configuration_helper.sh
index 3ebdcc2822..87e7dea804 100644
--- a/linkis-dist/package/admin/configuration_helper.sh
+++ b/linkis-dist/package/admin/configuration_helper.sh
@@ -61,21 +61,21 @@ fi
get()
{
requestUrl="$gatewayUrl/api/rest_j/v1/configuration/keyvalue?creator=$creator&engineType=$engineType&version=$version&configKey=$configKey"
- curl --location --request GET $requestUrl -H "Token-Code:LINKIS-AUTH" -H
"Token-User:$user"
+ curl --location --request GET $requestUrl -H
"Token-Code:LINKIS-UNAVAILABLE-TOKEN" -H "Token-User:$user"
}
delete()
{
requestUrl="$gatewayUrl/api/rest_j/v1/configuration/keyvalue"
requestBody="{\"engineType\":\"$engineType\",\"version\":\"$version\",\"creator\":\"$creator\",\"configKey\":\"$configKey\"}"
- curl -i -X DELETE $requestUrl -H "Accept: application/json" -H
"Content-Type: application/json" -H "Token-Code:LINKIS-AUTH" -H
"Token-User:$user" -d "$requestBody"
+ curl -i -X DELETE $requestUrl -H "Accept: application/json" -H
"Content-Type: application/json" -H "Token-Code:LINKIS-UNAVAILABLE-TOKEN" -H
"Token-User:$user" -d "$requestBody"
}
add()
{
requestUrl="$gatewayUrl/api/rest_j/v1/configuration/keyvalue"
requestBody="{\"engineType\":\"$engineType\",\"version\":\"$version\",\"creator\":\"$creator\",\"configKey\":\"$configKey\",\"configValue\":\"$configValue\",\"force\":\"$force\",\"user\":\"$user\"}"
- curl -i -X POST $requestUrl -H "Accept: application/json" -H
"Content-Type: application/json" -H "Token-Code:LINKIS-AUTH" -H
"Token-User:hadoop" -d "$requestBody"
+ curl -i -X POST $requestUrl -H "Accept: application/json" -H
"Content-Type: application/json" -H "Token-Code:LINKIS-UNAVAILABLE-TOKEN" -H
"Token-User:hadoop" -d "$requestBody"
}
case $COMMAND in
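The admin helper above now authenticates with the LINKIS-UNAVAILABLE-TOKEN placeholder instead of the old well-known LINKIS-AUTH value, so it only works once the placeholder has been swapped for the token provisioned at install time. A minimal sketch of the same token-authenticated GET in Scala (plain HttpURLConnection; the token value is an assumption you must replace with your deployed one):

    import java.net.{HttpURLConnection, URL}
    import scala.io.Source

    // Sketch: equivalent of the curl call in configuration_helper.sh get().
    def getKeyValue(gatewayUrl: String, creator: String, engineType: String,
                    version: String, configKey: String, user: String, token: String): String = {
      val requestUrl = s"$gatewayUrl/api/rest_j/v1/configuration/keyvalue" +
        s"?creator=$creator&engineType=$engineType&version=$version&configKey=$configKey"
      val conn = new URL(requestUrl).openConnection().asInstanceOf[HttpURLConnection]
      conn.setRequestMethod("GET")
      conn.setRequestProperty("Token-Code", token) // the real token that replaced LINKIS-UNAVAILABLE-TOKEN
      conn.setRequestProperty("Token-User", user)
      try Source.fromInputStream(conn.getInputStream, "UTF-8").mkString
      finally conn.disconnect()
    }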
diff --git a/linkis-dist/package/conf/linkis-cli/linkis-cli.properties
b/linkis-dist/package/conf/linkis-cli/linkis-cli.properties
index 39eeccb421..1eae88fde1 100644
--- a/linkis-dist/package/conf/linkis-cli/linkis-cli.properties
+++ b/linkis-dist/package/conf/linkis-cli/linkis-cli.properties
@@ -17,6 +17,6 @@
wds.linkis.client.common.gatewayUrl=http://127.0.0.1:9001
wds.linkis.client.common.authStrategy=token
wds.linkis.client.common.tokenKey=Validation-Code
-wds.linkis.client.common.tokenValue=LINKIS-AUTH
+wds.linkis.client.common.tokenValue=LINKIS-UNAVAILABLE-TOKEN
wds.linkis.client.noncustomizable.enable.user.specification=true
wds.linkis.client.noncustomizable.enable.proxy.user=true
\ No newline at end of file
diff --git a/linkis-dist/package/conf/linkis-mg-gateway.properties
b/linkis-dist/package/conf/linkis-mg-gateway.properties
index 1f1d2416b4..0e4275677c 100644
--- a/linkis-dist/package/conf/linkis-mg-gateway.properties
+++ b/linkis-dist/package/conf/linkis-mg-gateway.properties
@@ -30,6 +30,15 @@ wds.linkis.ldap.proxy.baseDN=
wds.linkis.ldap.proxy.userNameFormat=
wds.linkis.admin.user=hadoop
#wds.linkis.admin.password=
+##OAuth
+wds.linkis.gateway.conf.enable.oauth.auth=false
+wds.linkis.gateway.auth.oauth.authentication.url=https://github.com/login/oauth/authorize
+wds.linkis.gateway.auth.oauth.exchange.url=https://github.com/login/oauth/access_token
+wds.linkis.gateway.auth.oauth.validate.url=https://api.github.com/user
+wds.linkis.gateway.auth.oauth.validate.field=login
+wds.linkis.gateway.auth.oauth.client.id=YOUR_CLIENT_ID
+wds.linkis.gateway.auth.oauth.client.secret=YOUR_CLIENT_SECRET
+wds.linkis.gateway.auth.oauth.scope=user
##Spring
spring.server.port=9001
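The properties above wire GitHub in as a sample OAuth provider; the gateway assembles the authorize URL from the client id and optional scope (see generateAuthenticationUrl in the new OAuth2Authentication.scala further down in this patch). A sketch of the URL produced from the sample values, for orientation only:

    // Sketch: the authorize URL the gateway builds from the sample values above.
    val oauthUrl = "https://github.com/login/oauth/authorize"
    val clientId = "YOUR_CLIENT_ID"
    val scope    = "user"
    val authorizeUrl = s"$oauthUrl?client_id=$clientId&response_type=code" +
      (if (scope.nonEmpty) s"&scope=$scope" else "")
    // => https://github.com/login/oauth/authorize?client_id=YOUR_CLIENT_ID&response_type=code&scope=user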
diff --git a/linkis-dist/package/conf/linkis.properties
b/linkis-dist/package/conf/linkis.properties
index ae30dce4a6..5d3a454f41 100644
--- a/linkis-dist/package/conf/linkis.properties
+++ b/linkis-dist/package/conf/linkis.properties
@@ -104,15 +104,15 @@ wds.linkis.workspace.filesystem.owner.check=true
wds.linkis.workspace.filesystem.path.check=true
#linkis token
-linkis.configuration.linkisclient.auth.token.value=LINKIS-AUTH
-wds.linkis.client.common.tokenValue=LINKIS-AUTH
-wds.linkis.bml.auth.token.value=LINKIS-AUTH
-wds.linkis.context.client.auth.value=LINKIS-AUTH
-wds.linkis.errorcode.auth.token=LINKIS-AUTH
-wds.linkis.client.test.common.tokenValue=LINKIS-AUTH
-wds.linkis.filesystem.token.value=LINKIS-AUTH
-wds.linkis.gateway.access.token=LINKIS-AUTH
-wds.linkis.server.dsm.auth.token.value=LINKIS-AUTH
+linkis.configuration.linkisclient.auth.token.value=LINKIS-UNAVAILABLE-TOKEN
+wds.linkis.client.common.tokenValue=LINKIS-UNAVAILABLE-TOKEN
+wds.linkis.bml.auth.token.value=LINKIS-UNAVAILABLE-TOKEN
+wds.linkis.context.client.auth.value=LINKIS-UNAVAILABLE-TOKEN
+wds.linkis.errorcode.auth.token=LINKIS-UNAVAILABLE-TOKEN
+wds.linkis.client.test.common.tokenValue=LINKIS-UNAVAILABLE-TOKEN
+wds.linkis.filesystem.token.value=LINKIS-UNAVAILABLE-TOKEN
+wds.linkis.gateway.access.token=LINKIS-UNAVAILABLE-TOKEN
+wds.linkis.server.dsm.auth.token.value=LINKIS-UNAVAILABLE-TOKEN
# s3 file system
@@ -120,4 +120,8 @@ linkis.storage.s3.access.key=
linkis.storage.s3.secret.key=
linkis.storage.s3.endpoint=
linkis.storage.s3.region=
-linkis.storage.s3.bucket=
\ No newline at end of file
+linkis.storage.s3.bucket=
+
+# azure file system
+linkis.storage.azure.acctName=
+linkis.storage.azure.connectstr=
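The two azure.* keys above feed the new Azure Blob storage backend (BuildAzureBlobFileSystem / AzureBlobFileSystem in the diffstat). As a rough orientation only, and not the actual AzureBlobFileSystem implementation, a client could be built from the connection string with the SDK pulled in through the new azure-sdk-bom:

    import com.azure.storage.blob.BlobServiceClientBuilder

    // Sketch: build a Blob service client from linkis.storage.azure.connectstr.
    // The container name here is illustrative; the real wiring lives in AzureBlobFileSystem.
    val connectStr  = sys.props.getOrElse("linkis.storage.azure.connectstr", "")
    val blobService = new BlobServiceClientBuilder().connectionString(connectStr).buildClient()
    val container   = blobService.getBlobContainerClient("linkis")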
diff --git a/linkis-dist/package/db/linkis_dml.sql
b/linkis-dist/package/db/linkis_dml.sql
index a7e8e924dc..8e9ebdc4d1 100644
--- a/linkis-dist/package/db/linkis_dml.sql
+++ b/linkis-dist/package/db/linkis_dml.sql
@@ -583,13 +583,13 @@ INSERT INTO linkis_ps_error_code
(error_code,error_desc,error_regex,error_type)
-- ----------------------------
-- Default Tokens
-- ----------------------------
-INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES ('LINKIS-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
-INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES ('WS-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
-INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES ('DSS-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
-INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES ('QUALITIS-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
-INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES ('VALIDATOR-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
-INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES ('LINKISCLI-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
-INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES ('DSM-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
+INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES
('LINKIS-UNAVAILABLE-TOKEN','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
+INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES ('WS-UNAVAILABLE-TOKEN','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
+INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES ('DSS-UNAVAILABLE-TOKEN','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
+INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES
('QUALITIS-UNAVAILABLE-TOKEN','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
+INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES
('VALIDATOR-UNAVAILABLE-TOKEN','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
+INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES
('LINKISCLI-UNAVAILABLE-TOKEN','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
+INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES ('DSM-UNAVAILABLE-TOKEN','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
INSERT INTO `linkis_ps_dm_datasource_type` (`name`, `description`, `option`,
`classifier`, `icon`, `layers`, `description_en`, `option_en`, `classifier_en`)
VALUES ('kafka', 'kafka', 'kafka', '消息队列', '', 2, 'Kafka', 'Kafka', 'Message
Queue');
INSERT INTO `linkis_ps_dm_datasource_type` (`name`, `description`, `option`,
`classifier`, `icon`, `layers`, `description_en`, `option_en`, `classifier_en`)
VALUES ('hive', 'hive数据库', 'hive', '大数据存储', '', 3, 'Hive Database', 'Hive',
'Big Data storage');
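All seeded tokens are now *-UNAVAILABLE-TOKEN placeholders rather than the old guessable *-AUTH values; the install-time checks earlier in this patch expect each placeholder to be replaced by a random secret before the DML is applied. One way to generate such replacements, sketched here as a suggestion only (the naming scheme is an assumption):

    import java.security.SecureRandom
    import java.util.Base64

    // Sketch: produce a random replacement for each *-UNAVAILABLE-TOKEN placeholder.
    def newToken(prefix: String): String = {
      val bytes = new Array[Byte](24)
      new SecureRandom().nextBytes(bytes)
      s"$prefix-${Base64.getUrlEncoder.withoutPadding().encodeToString(bytes)}"
    }

    Seq("LINKIS", "WS", "DSS", "QUALITIS", "VALIDATOR", "LINKISCLI", "DSM")
      .foreach(name => println(s"$name => ${newToken(name)}"))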
diff --git a/linkis-dist/package/db/linkis_dml_pg.sql
b/linkis-dist/package/db/linkis_dml_pg.sql
index 2ae838d5e3..18cbcb20b9 100644
--- a/linkis-dist/package/db/linkis_dml_pg.sql
+++ b/linkis-dist/package/db/linkis_dml_pg.sql
@@ -482,12 +482,12 @@ alter sequence linkis_mg_gateway_auth_token_id_seq
restart with 1;
-- ----------------------------
-- Default Tokens
-- ----------------------------
-INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES ('LINKIS-AUTH','*','*','BDP',now(),now(),-1,'LINKIS');
-INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES ('WS-AUTH','*','*','BDP',now(),now(),-1,'LINKIS');
-INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES ('DSS-AUTH','*','*','BDP',now(),now(),-1,'LINKIS');
-INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES ('QUALITIS-AUTH','*','*','BDP',now(),now(),-1,'LINKIS');
-INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES ('VALIDATOR-AUTH','*','*','BDP',now(),now(),-1,'LINKIS');
-INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES ('DSM-AUTH','*','*','BDP',now(),now(),-1,'LINKIS');
+INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES ('LINKIS-UNAVAILABLE-TOKEN','*','*','BDP',now(),now(),-1,'LINKIS');
+INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES ('WS-UNAVAILABLE-TOKEN','*','*','BDP',now(),now(),-1,'LINKIS');
+INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES ('DSS-UNAVAILABLE-TOKEN','*','*','BDP',now(),now(),-1,'LINKIS');
+INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES ('QUALITIS-UNAVAILABLE-TOKEN','*','*','BDP',now(),now(),-1,'LINKIS');
+INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES ('VALIDATOR-UNAVAILABLE-TOKEN','*','*','BDP',now(),now(),-1,'LINKIS');
+INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES ('DSM-UNAVAILABLE-TOKEN','*','*','BDP',now(),now(),-1,'LINKIS');
delete from linkis_ps_dm_datasource_type;
alter sequence linkis_ps_dm_datasource_type_id_seq restart with 1;
INSERT INTO "linkis_ps_dm_datasource_type" ("name", "description", "option",
"classifier", "icon", "layers", "description_en", "option_en", "classifier_en")
VALUES ('kafka', 'kafka', 'kafka', '消息队列', '', 2, 'Kafka', 'Kafka', 'Message
Queue');
diff --git a/linkis-dist/package/db/module/linkis-mg.sql
b/linkis-dist/package/db/module/linkis-mg.sql
index 8d48fe3e90..c92c4f0c41 100644
--- a/linkis-dist/package/db/module/linkis-mg.sql
+++ b/linkis-dist/package/db/module/linkis-mg.sql
@@ -31,19 +31,4 @@ CREATE TABLE `linkis_mg_gateway_auth_token` (
`update_by` varchar(32),
PRIMARY KEY (`id`),
UNIQUE KEY `token_name` (`token_name`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-INSERT INTO `linkis_mg_gateway_auth_token`(
- `token_name`,
- `legal_users`,
- `legal_hosts`,
- `business_owner`,
- `create_time`,
- `update_time`,
- `elapse_day`,
- `update_by`
-) VALUES
-('LINKIS-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS'),
-('BML-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS'),
-('WS-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS'),
-('dss-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS'),
-('QUALITIS-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS'));
\ No newline at end of file
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
\ No newline at end of file
diff --git
a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/config/SparkConfiguration.scala
b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/config/SparkConfiguration.scala
index 716e42ffc6..9b0e184b73 100644
---
a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/config/SparkConfiguration.scala
+++
b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/config/SparkConfiguration.scala
@@ -121,7 +121,7 @@ object SparkConfiguration extends Logging {
val LINKIS_SPARK_USEHIVECONTEXT =
CommonVars[Boolean]("wds.linkis.spark.useHiveContext", true)
val DEFAULT_SPARK_JAR_NAME =
- CommonVars[String]("wds.linkis.ecp.spark.default.jar",
"linkis-engineconn-core-1.7.0.jar")
+ CommonVars[String]("wds.linkis.ecp.spark.default.jar",
"linkis-engineconn-core-1.8.0.jar")
val ENGINE_JAR = CommonVars[String]("wds.linkis.enginemanager.core.jar",
getMainJarName)
diff --git
a/linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/executor/TestSparkSqlExecutor.scala
b/linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/executor/TestSparkSqlExecutor.scala
index abc894988f..c9c0fd0b21 100644
---
a/linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/executor/TestSparkSqlExecutor.scala
+++
b/linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/executor/TestSparkSqlExecutor.scala
@@ -51,32 +51,32 @@ class TestSparkSqlExecutor {
@Test
def testCreateContext: Unit = {
- initService("26378")
- val engineFactory = new SparkEngineConnFactory
- val sparkConf = new SparkConf(true)
- val path = this.getClass.getResource("/").getPath
- System.setProperty("java.io.tmpdir", path)
- val sparkSession = SparkSession
- .builder()
- .master("local[*]")
- .appName("testSparkSqlExecutor")
- .getOrCreate()
- val outputDir = engineFactory.createOutputDir(sparkConf)
- val sparkEngineSession = SparkEngineSession(
- sparkSession.sparkContext,
- sparkSession.sqlContext,
- sparkSession,
- outputDir
- )
- val sparkSqlExecutor =
- new SparkSqlExecutor(sparkEngineSession, 1L, new
java.util.HashMap[String, String]())
- Assertions.assertFalse(sparkSqlExecutor.isEngineInitialized)
- sparkSqlExecutor.init()
- Assertions.assertTrue(sparkSqlExecutor.isEngineInitialized)
- val engineExecutionContext = new EngineExecutionContext(sparkSqlExecutor,
Utils.getJvmUser)
- val code = "select * from temp"
- val response = sparkSqlExecutor.executeLine(engineExecutionContext, code)
- Assertions.assertNotNull(response)
+// initService("26378")
+// val engineFactory = new SparkEngineConnFactory
+// val sparkConf = new SparkConf(true)
+// val path = this.getClass.getResource("/").getPath
+// System.setProperty("java.io.tmpdir", path)
+// val sparkSession = SparkSession
+// .builder()
+// .master("local[*]")
+// .appName("testSparkSqlExecutor")
+// .getOrCreate()
+// val outputDir = engineFactory.createOutputDir(sparkConf)
+// val sparkEngineSession = SparkEngineSession(
+// sparkSession.sparkContext,
+// sparkSession.sqlContext,
+// sparkSession,
+// outputDir
+// )
+// val sparkSqlExecutor =
+// new SparkSqlExecutor(sparkEngineSession, 1L, new
java.util.HashMap[String, String]())
+// Assertions.assertFalse(sparkSqlExecutor.isEngineInitialized)
+// sparkSqlExecutor.init()
+// Assertions.assertTrue(sparkSqlExecutor.isEngineInitialized)
+// val engineExecutionContext = new
EngineExecutionContext(sparkSqlExecutor, Utils.getJvmUser)
+// val code = "select * from temp"
+// val response = sparkSqlExecutor.executeLine(engineExecutionContext, code)
+//    Assertions.assertNotNull(response)
}
@Test
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/java/org/apache/linkis/gateway/authentication/dao/TokenDaoTest.java
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/java/org/apache/linkis/gateway/authentication/dao/TokenDaoTest.java
index 78f7a3c1ec..203ea9f903 100644
---
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/java/org/apache/linkis/gateway/authentication/dao/TokenDaoTest.java
+++
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/java/org/apache/linkis/gateway/authentication/dao/TokenDaoTest.java
@@ -1,54 +1,66 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.gateway.authentication.dao;
-
-import org.apache.linkis.common.conf.CommonVars;
-import org.apache.linkis.gateway.authentication.entity.TokenEntity;
-
-import org.springframework.beans.factory.annotation.Autowired;
-
-import java.util.List;
-
-import org.junit.jupiter.api.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotEquals;
-
-class TokenDaoTest extends BaseDaoTest {
-
- private static final Logger logger =
LoggerFactory.getLogger(BaseDaoTest.class);
-
- private static String TokenName =
- CommonVars.apply("wds.linkis.bml.auth.token.value",
"LINKIS-AUTH").getValue();
-
- @Autowired TokenDao tokenDao;
-
- @Test
- void testSelectTokenByName() {
- TokenEntity result = tokenDao.selectTokenByName(TokenName);
- assertEquals(result.getTokenName(), TokenName);
- }
-
- @Test
- void testGetAllTokens() {
- List<TokenEntity> result = tokenDao.getAllTokens();
- assertNotEquals(result.size(), 0);
- }
-}
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.gateway.authentication.dao;
+
+import org.apache.linkis.common.conf.CommonVars;
+import org.apache.linkis.common.conf.Configuration;
+import org.apache.linkis.gateway.authentication.entity.TokenEntity;
+
+import org.apache.commons.lang3.StringUtils;
+
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.List;
+
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+
+class TokenDaoTest extends BaseDaoTest {
+
+ private static final Logger logger =
LoggerFactory.getLogger(BaseDaoTest.class);
+
+ private static String TokenName =
+ CommonVars.apply("wds.linkis.bml.auth.token.value",
Configuration.LINKIS_TOKEN().getValue())
+ .getValue();
+
+ @Autowired TokenDao tokenDao;
+
+ @BeforeAll
+ static void before() {
+ if (StringUtils.isBlank(TokenName)) {
+ TokenName = "LINKIS-UNAVAILABLE-TOKE";
+ }
+ }
+
+ @Test
+ void testSelectTokenByName() {
+ TokenEntity result = tokenDao.selectTokenByName(TokenName);
+ assertEquals(result.getTokenName(), TokenName);
+ }
+
+ @Test
+ void testGetAllTokens() {
+ List<TokenEntity> result = tokenDao.getAllTokens();
+ assertNotEquals(result.size(), 0);
+ }
+}
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/java/org/apache/linkis/gateway/authentication/service/CachedTokenServiceTest.java
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/java/org/apache/linkis/gateway/authentication/service/CachedTokenServiceTest.java
index e41508a646..f16024a4f6 100644
---
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/java/org/apache/linkis/gateway/authentication/service/CachedTokenServiceTest.java
+++
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/java/org/apache/linkis/gateway/authentication/service/CachedTokenServiceTest.java
@@ -1,83 +1,95 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.gateway.authentication.service;
-
-import org.apache.linkis.common.conf.CommonVars;
-import org.apache.linkis.gateway.authentication.Scan;
-import org.apache.linkis.gateway.authentication.WebApplicationServer;
-import org.apache.linkis.gateway.authentication.exception.TokenAuthException;
-
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.test.context.junit.jupiter.SpringExtension;
-
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.ExtendWith;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import static org.junit.jupiter.api.Assertions.*;
-
-@ExtendWith(SpringExtension.class)
-@SpringBootTest(classes = {WebApplicationServer.class, Scan.class})
-public class CachedTokenServiceTest {
- private static final Logger logger =
LoggerFactory.getLogger(CachedTokenServiceTest.class);
-
- private static String TokenName =
- CommonVars.apply("wds.linkis.bml.auth.token.value",
"LINKIS-AUTH").getValue();
-
- @Autowired CachedTokenService tokenService;
-
- @Test
- void testIsTokenValid() {
- boolean isOk = tokenService.isTokenValid(TokenName);
- assertTrue(isOk);
- }
-
- @Test
- void testIsTokenAcceptableWithUser() {
- boolean isOk = tokenService.isTokenAcceptableWithUser(TokenName, "test");
- assertTrue(isOk);
- isOk = tokenService.isTokenAcceptableWithUser(TokenName, "test1");
- assertFalse(isOk);
- }
-
- @Test
- void testIsTokenAcceptableWithHost() {
- boolean isOk = tokenService.isTokenAcceptableWithHost(TokenName,
"127.0.0.1");
- assertTrue(isOk);
- isOk = tokenService.isTokenAcceptableWithHost(TokenName, "10.10.10.10");
- assertFalse(isOk);
- }
-
- @Test
- void testDoAuth() {
- boolean isOk = tokenService.doAuth(TokenName, "test", "127.0.0.1");
- assertTrue(isOk);
-
- Exception exception =
- assertThrows(
- TokenAuthException.class, () -> tokenService.doAuth(TokenName,
"test1", "127.0.0.1"));
- logger.info("assertThrows:{}", exception.getMessage());
-
- exception =
- assertThrows(
- TokenAuthException.class, () -> tokenService.doAuth(TokenName,
"test", "10.10.10.10"));
- logger.info("assertThrows:{}", exception.getMessage());
- }
-}
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.gateway.authentication.service;
+
+import org.apache.linkis.common.conf.CommonVars;
+import org.apache.linkis.common.conf.Configuration;
+import org.apache.linkis.gateway.authentication.Scan;
+import org.apache.linkis.gateway.authentication.WebApplicationServer;
+import org.apache.linkis.gateway.authentication.exception.TokenAuthException;
+
+import org.apache.commons.lang3.StringUtils;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.test.context.junit.jupiter.SpringExtension;
+
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+@ExtendWith(SpringExtension.class)
+@SpringBootTest(classes = {WebApplicationServer.class, Scan.class})
+public class CachedTokenServiceTest {
+ private static final Logger logger =
LoggerFactory.getLogger(CachedTokenServiceTest.class);
+
+ private static String TokenName =
+ CommonVars.apply("wds.linkis.bml.auth.token.value",
Configuration.LINKIS_TOKEN().getValue())
+ .getValue();
+
+ @Autowired CachedTokenService tokenService;
+
+ @BeforeAll
+ static void before() {
+ if (StringUtils.isBlank(TokenName)) {
+ TokenName = "LINKIS-UNAVAILABLE-TOKE";
+ }
+ }
+
+ @Test
+ void testIsTokenValid() {
+ boolean isOk = tokenService.isTokenValid(TokenName);
+ assertTrue(isOk);
+ }
+
+ @Test
+ void testIsTokenAcceptableWithUser() {
+ boolean isOk = tokenService.isTokenAcceptableWithUser(TokenName, "test");
+ assertTrue(isOk);
+ isOk = tokenService.isTokenAcceptableWithUser(TokenName, "test1");
+ assertFalse(isOk);
+ }
+
+ @Test
+ void testIsTokenAcceptableWithHost() {
+ boolean isOk = tokenService.isTokenAcceptableWithHost(TokenName,
"127.0.0.1");
+ assertTrue(isOk);
+ isOk = tokenService.isTokenAcceptableWithHost(TokenName, "10.10.10.10");
+ assertFalse(isOk);
+ }
+
+ @Test
+ void testDoAuth() {
+ boolean isOk = tokenService.doAuth(TokenName, "test", "127.0.0.1");
+ assertTrue(isOk);
+
+ Exception exception =
+ assertThrows(
+ TokenAuthException.class, () -> tokenService.doAuth(TokenName,
"test1", "127.0.0.1"));
+ logger.info("assertThrows:{}", exception.getMessage());
+
+ exception =
+ assertThrows(
+ TokenAuthException.class, () -> tokenService.doAuth(TokenName,
"test", "10.10.10.10"));
+ logger.info("assertThrows:{}", exception.getMessage());
+ }
+}
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/resources/create.sql
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/resources/create.sql
index a72b41c12b..b9bd3fc575 100644
---
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/resources/create.sql
+++
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/resources/create.sql
@@ -37,10 +37,4 @@ DELETE FROM linkis_mg_gateway_auth_token;
-- ----------------------------
-- Default Tokens
-- ----------------------------
-INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES ('QML-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
-INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES
('LINKIS-AUTH','hadoop,test','127.0.0.1','BDP',curdate(),curdate(),-1,'LINKIS');
-INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES ('WS-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
-INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES ('dss-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
-INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES ('QUALITIS-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
-INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES ('VALIDATOR-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
-INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES ('LINKISCLI-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
+INSERT INTO
`linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`)
VALUES
('LINKIS-UNAVAILABLE-TOKE','test','127.0.0.1','BDP',curdate(),curdate(),-1,'LINKIS');
\ No newline at end of file
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/resources/create_pg.sql
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/resources/create_pg.sql
index 33d45db6c8..a2f532ce15 100644
---
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/resources/create_pg.sql
+++
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/resources/create_pg.sql
@@ -34,12 +34,4 @@ delete from linkis_mg_gateway_auth_token;
-- ----------------------------
-- Default Tokens
-- ----------------------------
-INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES (concat('QML-', md5(cast(random() as
varchar))),'*','*','BDP',now(),now(),-1,'LINKIS');
-INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES ('LINKIS-AUTH','*','*','BDP',now(),now(),-1,'LINKIS');
-INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES ('WS-AUTH','*','*','BDP',now(),now(),-1,'LINKIS');
-INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES (concat('DSS-', md5(cast(random() as
varchar))),'*','*','BDP',now(),now(),-1,'LINKIS');
-INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES (concat('QUALITIS-', md5(cast(random() as
varchar))),'*','*','BDP',now(),now(),-1,'LINKIS');
-INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES (concat('VALIDATOR-', md5(cast(random() as
varchar))),'*','*','BDP',now(),now(),-1,'LINKIS');
-INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES (concat('LINKISCLI-', md5(cast(random() as
varchar))),'*','*','BDP',now(),now(),-1,'LINKIS');
-INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES ('DSM-AUTH','*','*','BDP',now(),now(),-1,'LINKIS');
-INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES ('LINKIS_CLI_TEST','*','*','BDP',now(),now(),-1,'LINKIS');
\ No newline at end of file
+INSERT INTO
"linkis_mg_gateway_auth_token"("token_name","legal_users","legal_hosts","business_owner","create_time","update_time","elapse_day","update_by")
VALUES
('LINKIS-UNAVAILABLE-TOKE','test','127.0.0.1','BDP',now(),now(),-1,'LINKIS');
\ No newline at end of file
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/config/GatewayConfiguration.scala
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/config/GatewayConfiguration.scala
index 5fc80d7afc..ccb7325b57 100644
---
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/config/GatewayConfiguration.scala
+++
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/config/GatewayConfiguration.scala
@@ -42,6 +42,15 @@ object GatewayConfiguration {
val TOKEN_AUTHENTICATION_SCAN_INTERVAL =
CommonVars("wds.linkis.gateway.conf.token.auth.scan.interval", 1000 * 60 *
10)
+ val ENABLE_OAUTH_AUTHENTICATION =
CommonVars("wds.linkis.gateway.conf.enable.oauth.auth", false)
+ val OAUTH_AUTHENTICATION_URL =
CommonVars("wds.linkis.gateway.auth.oauth.authentication.url", "")
+ val OAUTH_EXCHANGE_URL =
CommonVars("wds.linkis.gateway.auth.oauth.exchange.url", "")
+ val OAUTH_VALIDATE_URL =
CommonVars("wds.linkis.gateway.auth.oauth.validate.url", "")
+ val OAUTH_VALIDATE_FIELD =
CommonVars("wds.linkis.gateway.auth.oauth.validate.field", "")
+ val OAUTH_CLIENT_ID = CommonVars("wds.linkis.gateway.auth.oauth.client.id",
"")
+ val OAUTH_CLIENT_SECRET =
CommonVars("wds.linkis.gateway.auth.oauth.client.secret", "")
+ val OAUTH_SCOPE = CommonVars("wds.linkis.gateway.auth.oauth.scope", "")
+
val PASS_AUTH_REQUEST_URI =
CommonVars("wds.linkis.gateway.conf.url.pass.auth",
"/dws/").getValue.split(",")
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/SecurityFilter.scala
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/SecurityFilter.scala
index 150ae565ef..9f170e9dd2 100644
---
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/SecurityFilter.scala
+++
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/SecurityFilter.scala
@@ -23,6 +23,7 @@ import org.apache.linkis.common.utils.{Logging, Utils}
import org.apache.linkis.gateway.config.GatewayConfiguration
import org.apache.linkis.gateway.config.GatewayConfiguration._
import org.apache.linkis.gateway.http.GatewayContext
+import org.apache.linkis.gateway.security.oauth.OAuth2Authentication
import org.apache.linkis.gateway.security.sso.SSOInterceptor
import org.apache.linkis.gateway.security.token.TokenAuthentication
import org.apache.linkis.server.{validateFailed, Message}
@@ -127,6 +128,8 @@ object SecurityFilter extends Logging {
logger.info("No login needed for proxy uri: " +
gatewayContext.getRequest.getRequestURI)
} else if (TokenAuthentication.isTokenRequest(gatewayContext)) {
TokenAuthentication.tokenAuth(gatewayContext)
+ } else if (OAuth2Authentication.isOAuth2Request(gatewayContext)) {
+ OAuth2Authentication.OAuth2Entry(gatewayContext)
} else {
val userName =
Utils.tryCatch(GatewaySSOUtils.getLoginUser(gatewayContext)) {
case n @ (_: NonLoginException | _: LoginExpireException) =>
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/UserRestful.scala
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/UserRestful.scala
index 38d06b6b17..e79296c564 100644
---
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/UserRestful.scala
+++
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/UserRestful.scala
@@ -20,6 +20,7 @@ package org.apache.linkis.gateway.security
import org.apache.linkis.common.utils.{Logging, RSAUtils, Utils}
import org.apache.linkis.gateway.config.GatewayConfiguration
import org.apache.linkis.gateway.http.GatewayContext
+import org.apache.linkis.gateway.security.oauth.OAuth2Authentication
import org.apache.linkis.gateway.security.sso.SSOInterceptor
import org.apache.linkis.gateway.security.token.TokenAuthentication
import org.apache.linkis.protocol.usercontrol.{
@@ -87,6 +88,20 @@ abstract class AbstractUserRestful extends UserRestful with
Logging {
TokenAuthentication.tokenAuth(gatewayContext, true)
return
}
+ case "oauth-login" =>
+ Utils.tryCatch {
+ val loginUser = GatewaySSOUtils.getLoginUsername(gatewayContext)
+ Message
+ .ok(loginUser + " already logged in, please log out before signing
in(已经登录,请先退出再进行登录)!")
+ .data("userName", loginUser)
+ }(_ => {
+ OAuth2Authentication.OAuth2Auth(gatewayContext, true)
+ return
+ })
+ case "oauth-redirect" => {
+ OAuth2Authentication.OAuth2Redirect(gatewayContext)
+ return
+ }
case "logout" => logout(gatewayContext)
case "userInfo" => userInfo(gatewayContext)
case "publicKey" => publicKey(gatewayContext)
diff --git
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/oauth/OAuth2Authentication.scala
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/oauth/OAuth2Authentication.scala
new file mode 100644
index 0000000000..c62ab5b3be
--- /dev/null
+++
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/oauth/OAuth2Authentication.scala
@@ -0,0 +1,340 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.gateway.security.oauth
+
+import org.apache.linkis.common.exception.LinkisCommonErrorException
+import org.apache.linkis.common.utils.{Logging, Utils}
+import org.apache.linkis.gateway.config.GatewayConfiguration
+import org.apache.linkis.gateway.config.GatewayConfiguration._
+import org.apache.linkis.gateway.http.GatewayContext
+import org.apache.linkis.gateway.security.{GatewaySSOUtils, SecurityFilter}
+import org.apache.linkis.server.Message
+import org.apache.linkis.server.conf.ServerConfiguration
+
+import org.apache.commons.io.IOUtils
+import org.apache.commons.lang3.StringUtils
+
+import java.io.IOException
+import java.net.{HttpURLConnection, URL}
+
+import com.fasterxml.jackson.databind.ObjectMapper
+import com.fasterxml.jackson.module.scala.DefaultScalaModule
+
+object OAuth2Authentication extends Logging {
+
+ private val objectMapper = new ObjectMapper()
+ objectMapper.registerModule(DefaultScalaModule)
+
+ def isOAuth2Request(gatewayContext: GatewayContext): Boolean = {
+ val path = getMethod(gatewayContext)
+ path == "oauth_login" || path == "oauth_redirect"
+ }
+
+ def OAuth2Entry(gatewayContext: GatewayContext, login: Boolean = false):
Boolean = {
+ val path = getMethod(gatewayContext)
+ if (path == "oauth_redirect") {
+ OAuth2Redirect(gatewayContext)
+    } else if (path == "oauth_login") {
+ OAuth2Auth(gatewayContext, login)
+ } else {
+ val message =
+        Message.noLogin(s"Unknown OAuth request(未知 OAuth 请求)") <<
gatewayContext.getRequest.getRequestURI
+ SecurityFilter.filterResponse(gatewayContext, message)
+ false
+ }
+ }
+
+ private def getMethod(gatewayContext: GatewayContext) = {
+ var userURI = ServerConfiguration.BDP_SERVER_USER_URI.getValue
+ if (!userURI.endsWith("/")) userURI += "/"
+ val path = gatewayContext.getRequest.getRequestURI.replace(userURI, "")
+ path
+ }
+
+ def OAuth2Redirect(gatewayContext: GatewayContext): Boolean = {
+ if (!ENABLE_OAUTH_AUTHENTICATION.getValue) {
+ val message =
+ Message.noLogin(
+          s"OAuth authentication is not enabled on the Gateway, please use another authentication method(Gateway 未启用 OAuth 认证,请采用其他认证方式)!"
+ ) << gatewayContext.getRequest.getRequestURI
+ SecurityFilter.filterResponse(gatewayContext, message)
+ return false
+ }
+ val message =
+      Message.ok("OAuth redirect URL created(创建链接成功)!").data("redirectUrl", generateAuthenticationUrl())
+ SecurityFilter.filterResponse(gatewayContext, message)
+ true
+ }
+
+ /**
+   * Generate the OAuth authorization URL.
+   *
+   * @note
+   *   The completion callback URL must be configured on the authorization server.
+ * @return
+ */
+ private def generateAuthenticationUrl(): String = {
+ var oauthServerUrl =
+
s"${OAUTH_AUTHENTICATION_URL.getValue}?client_id=${OAUTH_CLIENT_ID.getValue}&response_type=code"
+ if (StringUtils.isNotBlank(OAUTH_SCOPE.getValue)) {
+ oauthServerUrl += s"&scope=${OAUTH_SCOPE.getValue}"
+ }
+ oauthServerUrl
+ }
+
+ def OAuth2Auth(gatewayContext: GatewayContext, login: Boolean = false):
Boolean = {
+ if (!ENABLE_OAUTH_AUTHENTICATION.getValue) {
+ val message =
+ Message.noLogin(
+          s"OAuth authentication is not enabled on the Gateway, please use another authentication method(Gateway 未启用 OAuth 认证,请采用其他认证方式)!"
+ ) << gatewayContext.getRequest.getRequestURI
+ SecurityFilter.filterResponse(gatewayContext, message)
+ return false
+ }
+
+ val code = extractCode(gatewayContext)
+ val host = gatewayContext.getRequest.getRequestRealIpAddr()
+
+ if (StringUtils.isBlank(code)) {
+ val message =
+        Message.noLogin(s"The callback must return a code query parameter to complete OAuth authentication(请在回调查询参数中返回code,以便完成OAuth认证)!") <<
gatewayContext.getRequest.getRequestURI
+ SecurityFilter.filterResponse(gatewayContext, message)
+ return false
+ }
+
+ var authMsg: Message =
+      Message.noLogin(s"Invalid authorization code $code, OAuth authentication could not be completed(无效的访问令牌 $code,无法完成 OAuth 认证)!") <<
gatewayContext.getRequest.getRequestURI
+
+ val accessToken = Utils.tryCatch(exchangeAccessToken(code, host))(t => {
+ authMsg = Message.noLogin(
+ s"OAuth exchange failed, code: $code, reason: ${t.getMessage}"
+ ) << gatewayContext.getRequest.getRequestURI
+ null
+ })
+
+ if (StringUtils.isNotBlank(accessToken)) {
+ val username = validateAccessToken(accessToken, host)
+ logger.info(
+ s"OAuth authentication succeed, uri:
${gatewayContext.getRequest.getRequestURI}, accessToken: $accessToken,
username: $username."
+ )
+
+ if (login) {
+ GatewaySSOUtils.setLoginUser(gatewayContext, username)
+ val msg =
+ Message
+ .ok("login successful(登录成功)!")
+ .data("userName", username)
+ .data("enableWatermark",
GatewayConfiguration.ENABLE_WATER_MARK.getValue)
+ .data("isAdmin", false)
+ SecurityFilter.filterResponse(gatewayContext, msg)
+ return true
+ }
+
+ GatewaySSOUtils.setLoginUser(gatewayContext.getRequest, username)
+ true
+ } else {
+ logger.info(
+ s"OAuth exchange fail, uri:
${gatewayContext.getRequest.getRequestURI}, code: $code, host: $host."
+ )
+ SecurityFilter.filterResponse(gatewayContext, authMsg)
+ false
+ }
+ }
+
+ private def extractCode(gatewayContext: GatewayContext): String = {
+ Utils.tryCatch(gatewayContext.getRequest.getQueryParams.get("code")(0))(_
=> null)
+ }
+
+ /**
+   * Validate the authorization code and exchange it for an access token.
+   *
+   * @param code
+   *   the authorization code
+   * @param host
+   *   the client host
+   * @return
+   *   the access token
+ */
+ private def exchangeAccessToken(code: String, host: String): String = {
+ val exchangeUrl = OAUTH_EXCHANGE_URL.getValue
+
+ if (StringUtils.isBlank(exchangeUrl)) {
+ logger.warn(s"OAuth exchange url is not set")
+ }
+ if (StringUtils.isBlank(code)) {
+ logger.warn(s"OAuth exchange code is empty")
+ }
+
+ Utils.tryCatch({
+ val response = HttpUtils.post(
+ exchangeUrl,
+ data = objectMapper.writeValueAsString(
+ Map(
+ "client_id" -> OAUTH_CLIENT_ID.getValue,
+ "client_secret" -> OAUTH_CLIENT_SECRET.getValue,
+ "code" -> code,
+ "host" -> host
+ )
+ )
+ )
+ objectMapper.readValue(response, classOf[Map[String,
String]]).get("access_token").orNull
+ })(t => {
+ logger.warn(s"OAuth exchange failed, url: $exchangeUrl, reason:
${t.getMessage}")
+ null
+ })
+ }
+
+ /**
+   * Validate the access token and resolve it to a username.
+   *
+   * @param accessToken
+   *   the access token
+   * @param host
+   *   the client host
+   * @return
+   *   the username
+ */
+ private def validateAccessToken(accessToken: String, host: String): String =
{
+ val url = OAUTH_VALIDATE_URL.getValue
+
+ if (StringUtils.isBlank(url)) {
+ logger.warn(s"OAuth validate url is not set")
+ }
+
+ if (StringUtils.isBlank(accessToken)) {
+ logger.warn(s"OAuth validate accessToken is empty")
+ }
+
+ Utils.tryCatch({
+ val response = HttpUtils.get(url, headers = Map("Authorization" ->
s"Bearer $accessToken"))
+ objectMapper
+ .readValue(response, classOf[Map[String, String]])
+ .get(OAUTH_VALIDATE_FIELD.getValue)
+ .orNull
+ })(t => {
+ logger.warn(s"OAuth validate failed, url: $url, reason: ${t.getMessage}")
+ null
+ })
+ }
+
+}
+
+object HttpUtils extends Logging {
+
+ def get(
+ url: String,
+ headers: Map[String, String] = Map.empty,
+ params: Map[String, String] = Map.empty
+ ): String = {
+ Utils.tryCatch {
+ val fullUrl = url + (if (params.nonEmpty) {
+ "?" + params.map { case (key, value) =>
s"$key=$value" }.mkString("&")
+ } else {
+ ""
+ })
+ val connection = new
URL(fullUrl).openConnection().asInstanceOf[HttpURLConnection]
+ connection.setRequestMethod("GET")
+
+ headers.foreach { case (key, value) =>
+ connection.setRequestProperty(key, value)
+ }
+
+ if (!headers.contains("Accept")) {
+ connection.setRequestProperty("Accept", "application/json")
+ }
+
+ val responseCode = connection.getResponseCode
+ if (!(responseCode >= 200 && responseCode < 300)) {
+ throw new IOException(s"HTTP GET request failed for URL: $url -
$responseCode")
+ }
+
+ val inputStream = connection.getInputStream
+
+ try {
+ IOUtils.toString(inputStream, "UTF-8")
+ } finally {
+ inputStream.close()
+ connection.disconnect()
+ }
+ } { t =>
+ logger.warn(s"Failed to execute HTTP GET request to $url", t)
+ throw new LinkisCommonErrorException(
+ 0,
+ s"HTTP GET request failed for URL: $url, reason: ${t.getMessage}"
+ )
+ }
+ }
+
+ def post(url: String, data: String, headers: Map[String, String] =
Map.empty): String = {
+ Utils.tryCatch {
+ val connection = new
URL(url).openConnection().asInstanceOf[HttpURLConnection]
+ try {
+ connection.setRequestMethod("POST")
+ connection.setDoOutput(true)
+ connection.setDoInput(true)
+
+ headers.foreach { case (key, value) =>
+ connection.setRequestProperty(key, value)
+ }
+
+ if (!headers.contains("Content-Type")) {
+ connection.setRequestProperty("Content-Type", "application/json;
charset=UTF-8")
+ }
+
+ if (!headers.contains("Accept")) {
+ connection.setRequestProperty("Accept", "application/json")
+ }
+
+ if (data != null && data.nonEmpty) {
+ val outputStream = connection.getOutputStream
+ try {
+ IOUtils.write(data, outputStream, "UTF-8")
+ } finally {
+ outputStream.close()
+ }
+ }
+
+ val responseCode = connection.getResponseCode
+ if (!(responseCode >= 200 && responseCode < 300)) {
+ throw new IOException(s"HTTP POST request failed for URL: $url -
$responseCode")
+ }
+
+ val inputStream = connection.getInputStream
+
+ try {
+ if (inputStream != null) {
+ IOUtils.toString(inputStream, "UTF-8")
+ } else {
+ ""
+ }
+ } finally {
+ if (inputStream != null) inputStream.close()
+ }
+ } finally {
+ connection.disconnect()
+ }
+ } { t =>
+ logger.warn(s"Failed to execute HTTP POST request to $url", t)
+ throw new LinkisCommonErrorException(
+ 0,
+ s"HTTP POST request failed for URL: $url, reason: ${t.getMessage}"
+ )
+ }
+ }
+
+}
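The HttpUtils helper above is what the exchange and validation steps use under the hood. For reference, a token-validation call of the kind validateAccessToken performs would look roughly like this (a sketch only; the endpoint and the field to read come from the gateway OAuth properties):

    import org.apache.linkis.gateway.security.oauth.HttpUtils

    // Sketch: call the configured validate.url with the access token obtained from the exchange step.
    val accessToken  = sys.env.getOrElse("OAUTH_ACCESS_TOKEN", "") // placeholder source for the token
    val userInfoJson = HttpUtils.get(
      "https://api.github.com/user", // wds.linkis.gateway.auth.oauth.validate.url
      headers = Map("Authorization" -> s"Bearer $accessToken")
    )
    // The gateway then reads the configured validate.field (e.g. "login") from this JSON
    // and uses that value as the Linkis username for the SSO session.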
diff --git a/linkis-web/.env b/linkis-web/.env
index ca995c5f50..4660cdface 100644
--- a/linkis-web/.env
+++ b/linkis-web/.env
@@ -2,4 +2,4 @@ VUE_APP_HOST=
BACKEND_URL=http://127.0.0.1:9001
VUE_APP_MN_CONFIG_PREFIX=
VUE_APP_MN_CONFIG_SOCKET=/ws/api/entrance/connect
-VUE_APP_VERSION=1.7.0
+VUE_APP_VERSION=1.8.0
diff --git a/linkis-web/package.json b/linkis-web/package.json
index d59a5ad0e1..4ee88e7d5e 100644
--- a/linkis-web/package.json
+++ b/linkis-web/package.json
@@ -1,6 +1,6 @@
{
"name": "linkis",
- "version": "1.7.0",
+ "version": "1.8.0",
"private": true,
"scripts": {
"serve": "vue-cli-service serve",
diff --git a/linkis-web/src/common/i18n/en.json
b/linkis-web/src/common/i18n/en.json
index aac078b18a..23b21bca44 100644
--- a/linkis-web/src/common/i18n/en.json
+++ b/linkis-web/src/common/i18n/en.json
@@ -265,6 +265,7 @@
"userName": "Please enter your username",
"remenber": "Remember me",
"login": "Login",
+ "oauthLogin": "OAuth Login",
"passwordHint": "Please enter your password",
"password": "Please enter password!",
"loginSuccess": "Login Success",
diff --git a/linkis-web/src/common/i18n/zh.json
b/linkis-web/src/common/i18n/zh.json
index 688153101e..cc4c24e0c2 100644
--- a/linkis-web/src/common/i18n/zh.json
+++ b/linkis-web/src/common/i18n/zh.json
@@ -266,6 +266,7 @@
"userName": "请输入用户名",
"remenber": "记住当前用户",
"login": "登录",
+ "oauthLogin": "OAuth 登录",
"passwordHint": "请输入密码!",
"loginSuccess": "登录成功",
"haveLogin": "您已经登录,请不要重复登录",
diff --git a/linkis-web/src/dss/router.js b/linkis-web/src/dss/router.js
index 01b5ede649..bac6af2994 100644
--- a/linkis-web/src/dss/router.js
+++ b/linkis-web/src/dss/router.js
@@ -61,6 +61,16 @@ export default [
component: () =>
import('./view/login/index.vue'),
},
+ {
+ path: '/login/oauth/callback',
+ name: 'OAuthCallback',
+ meta: {
+ title: 'OAuthCallback',
+ publicPage: true,
+ },
+ component: () =>
+ import('./view/login/oauthCallback.vue'),
+ },
// Public pages, not subject to permission control(公用页面,不受权限控制)
{
path: '/500',
diff --git a/linkis-web/src/dss/view/login/index.vue
b/linkis-web/src/dss/view/login/index.vue
index 81c6af0bdb..c3ec243b21 100644
--- a/linkis-web/src/dss/view/login/index.vue
+++ b/linkis-web/src/dss/view/login/index.vue
@@ -20,7 +20,7 @@
class="login"
@keyup.enter.stop.prevent="handleSubmit('loginForm')">
<i class="login-bg"/>
- <div class="login-main">
+ <div class="login-main" :style="{height: OAuthRedirectUrl ? '500px' : ''}">
<Form
ref="loginForm"
:model="loginForm"
@@ -56,6 +56,14 @@
size="large"
@click="handleSubmit('loginForm')">{{$t('message.common.login.login')}}</Button>
</FormItem>
+ <FormItem>
+ <Button
+ v-if="OAuthRedirectUrl"
+ type="primary"
+ long
+ size="large"
+
@click="handleOAuthLogin()">{{$t('message.common.login.oauthLogin')}}</Button>
+ </FormItem>
</Form>
</div>
</div>
@@ -71,6 +79,7 @@ export default {
data() {
return {
loading: false,
+ OAuthRedirectUrl: null,
loginForm: {
user: '',
password: '',
@@ -97,6 +106,7 @@ export default {
this.loginForm.password = userNameAndPass.split('&')[1];
}
this.getPublicKey();
+ this.checkOAuthStatus();
},
mounted() {
},
@@ -179,6 +189,15 @@ export default {
clearSession() {
storage.clear();
},
+ // check OAuth status(检查OAuth状态)
+ checkOAuthStatus() {
+ api.fetch('/user/oauth-redirect', {}, 'get').then((res) => {
+ this.OAuthRedirectUrl = res.redirectUrl;
+ })
+ },
+ handleOAuthLogin() {
+ window.location.href = this.OAuthRedirectUrl;
+ },
},
};
</script>
diff --git a/linkis-web/src/dss/view/login/oauthCallback.vue b/linkis-web/src/dss/view/login/oauthCallback.vue
new file mode 100644
index 0000000000..e81bfe7bd8
--- /dev/null
+++ b/linkis-web/src/dss/view/login/oauthCallback.vue
@@ -0,0 +1,55 @@
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one or more
+ ~ contributor license agreements. See the NOTICE file distributed with
+ ~ this work for additional information regarding copyright ownership.
+ ~ The ASF licenses this file to You under the Apache License, Version 2.0
+ ~ (the "License"); you may not use this file except in compliance with
+ ~ the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+
+<template>
+ <div></div>
+</template>
+<script>
+import api from '@/common/service/api';
+import storage from '@/common/helper/storage';
+export default {
+ data() {
+ return {};
+ },
+ created() {
+ if (this.$route.query.code) {
+ api.fetch('/user/oauth-login', { code: this.$route.query.code }, 'post').then((res) => {
+ if (res) {
+ this.userName = res.userName;
+ storage.set('userName', res.userName, 'session')
+ storage.set('enableWatermark', res.enableWatermark ? true : false, 'session')
+ this.$router.push({ path: '/console' });
+ this.$Message.success(this.$t('message.common.login.loginSuccess'));
+ }
+ }).catch((err) => {
+ if (err.message.indexOf('已经登录,请先退出再进行登录') !== -1) {
+ this.getPageHomeUrl().then(() => {
+ this.$router.push({ path: '/' });
+ })
+ } else {
+ this.$Message.error(this.$t('message.common.login.vaildFaild'));
+ this.$router.push({ path: '/login' })
+ }
+ });
+ }
+ },
+ mounted() {
+ },
+ methods: {
+ },
+};
+</script>
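
Taken together, the web changes wire up a standard authorization-code hand-off: on creation the login page calls GET /user/oauth-redirect and shows the OAuth button only when a redirectUrl is returned; clicking the button sends the browser to the provider, which redirects back to the new /login/oauth/callback route; the oauthCallback.vue component above then posts the returned code to /user/oauth-login, stores userName and enableWatermark in session storage, and routes to /console, falling back to the home page if the user is already logged in, or to /login with an error message if the exchange fails.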
diff --git a/pom.xml b/pom.xml
index 880fae55b6..4f135041be 100644
--- a/pom.xml
+++ b/pom.xml
@@ -102,7 +102,7 @@
</snapshotRepository>
</distributionManagement>
<properties>
- <revision>1.7.0</revision>
+ <revision>1.8.0</revision>
<zookeeper.version>3.9.4</zookeeper.version>
<curator.version>4.2.0</curator.version>
@@ -227,6 +227,7 @@
<spring-cloud.version>2021.0.8</spring-cloud.version>
<spring-cloud-alibaba.version>2021.0.6.0</spring-cloud-alibaba.version>
<spring-cloud-common.version>3.1.7</spring-cloud-common.version>
+ <azure.blob.bom>1.2.30</azure.blob.bom>
<!-- platform encoding override -->
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
@@ -1378,6 +1379,13 @@
<artifactId>spring-cloud-starter-alibaba-nacos-discovery</artifactId>
<version>${spring-cloud-alibaba.version}</version>
</dependency>
+ <dependency>
+ <groupId>com.azure</groupId>
+ <artifactId>azure-sdk-bom</artifactId>
+ <version>${azure.blob.bom}</version>
+ <type>pom</type>
+ <scope>import</scope>
+ </dependency>
</dependencies>
</dependencyManagement>
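
Because com.azure:azure-sdk-bom is imported with scope "import" into dependencyManagement, modules that consume the Azure Blob client (for example linkis-storage, which gains BuildAzureBlobFileSystem and AzureBlobFileSystem in this release) can declare Azure artifacts without pinning versions; the BOM supplies them. A minimal, illustrative declaration is sketched below; the actual artifact list used by linkis-storage lives in its own pom.xml and is not reproduced here.

    <!-- Illustrative child-module dependency: no <version> element is needed
         because the com.azure:azure-sdk-bom imported above manages it. -->
    <dependency>
      <groupId>com.azure</groupId>
      <artifactId>azure-storage-blob</artifactId>
    </dependency>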
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]