This is an automated email from the ASF dual-hosted git repository. jianglongtao pushed a commit to branch fix-33341 in repository https://gitbox.apache.org/repos/asf/shardingsphere.git
commit 96c5337e014b87fe028c508886e808ddbe66a887 Author: RaigorJiang <[email protected]> AuthorDate: Tue Aug 6 19:49:46 2024 +0800 Support only alter properties of hikari --- .../rdl/resource/AlterStorageUnitExecutor.java | 20 +++- .../rdl/resource/RegisterStorageUnitExecutor.java | 14 +++ .../apache/shardingsphere/infra/util/SphereEx.java | 42 +++++++++ mode/core/pom.xml | 5 + .../context/ConfigurationContextManager.java | 103 +++++++++++++++++++++ .../engine/src/main/antlr4/imports/RDLStatement.g4 | 6 +- .../core/kernel/KernelDistSQLStatementVisitor.java | 19 +++- .../segment/AlterPoolPropertiesSegment.java | 35 +++++++ .../converter/DataSourceSegmentsConverter.java | 84 +++++++++++++++++ 9 files changed, 319 insertions(+), 9 deletions(-) diff --git a/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/executor/rdl/resource/AlterStorageUnitExecutor.java b/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/executor/rdl/resource/AlterStorageUnitExecutor.java index 11f5fabffcf..1ae36007c45 100644 --- a/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/executor/rdl/resource/AlterStorageUnitExecutor.java +++ b/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/executor/rdl/resource/AlterStorageUnitExecutor.java @@ -22,6 +22,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.shardingsphere.distsql.handler.aware.DistSQLExecutorDatabaseAware; import org.apache.shardingsphere.distsql.handler.engine.update.DistSQLUpdateExecutor; import org.apache.shardingsphere.distsql.handler.validate.DistSQLDataSourcePoolPropertiesValidator; +import org.apache.shardingsphere.distsql.segment.AlterPoolPropertiesSegment; import org.apache.shardingsphere.distsql.segment.DataSourceSegment; import org.apache.shardingsphere.distsql.segment.HostnameAndPortBasedDataSourceSegment; import org.apache.shardingsphere.distsql.segment.URLBasedDataSourceSegment; @@ -40,10 +41,13 @@ import 
org.apache.shardingsphere.infra.exception.kernel.metadata.resource.storag import org.apache.shardingsphere.infra.exception.kernel.metadata.resource.storageunit.StorageUnitsOperateException; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnit; +import org.apache.shardingsphere.infra.util.SphereEx; +import org.apache.shardingsphere.infra.util.SphereEx.Type; import org.apache.shardingsphere.mode.manager.ContextManager; import java.sql.SQLException; import java.util.Collection; +import java.util.LinkedHashMap; import java.util.Map; import java.util.Map.Entry; import java.util.Objects; @@ -60,11 +64,14 @@ public final class AlterStorageUnitExecutor implements DistSQLUpdateExecutor<Alt private ShardingSphereDatabase database; + @SphereEx(Type.MODIFY) @Override public void executeUpdate(final AlterStorageUnitStatement sqlStatement, final ContextManager contextManager) { checkBefore(sqlStatement); - Map<String, DataSourcePoolProperties> propsMap = DataSourceSegmentsConverter.convert(database.getProtocolType(), sqlStatement.getStorageUnits()); - validateHandler.validate(propsMap, getExpectedPrivileges(sqlStatement)); + Map<String, DataSourcePoolProperties> propsMap = DataSourceSegmentsConverter.convert(database.getProtocolType(), database.getResourceMetaData(), sqlStatement.getStorageUnits()); + if (!sqlStatement.getStorageUnits().stream().allMatch(each -> each instanceof AlterPoolPropertiesSegment)) { + validateHandler.validate(propsMap, getExpectedPrivileges(sqlStatement)); + } try { contextManager.getInstanceContext().getModeContextManager().alterStorageUnits(database.getName(), propsMap); } catch (final SQLException | ShardingSphereExternalException ex) { @@ -89,8 +96,15 @@ public final class AlterStorageUnitExecutor implements DistSQLUpdateExecutor<Alt ShardingSpherePreconditions.checkState(notExistedStorageUnitNames.isEmpty(), () -> new 
MissingRequiredStorageUnitsException(database.getName(), notExistedStorageUnitNames)); } + @SphereEx(Type.MODIFY) private void checkDatabase(final AlterStorageUnitStatement sqlStatement) { - Collection<String> invalidStorageUnitNames = sqlStatement.getStorageUnits().stream().collect(Collectors.toMap(DataSourceSegment::getName, each -> each)).entrySet().stream() + Map<String, DataSourceSegment> toBeCheckedSegments = new LinkedHashMap<>(sqlStatement.getStorageUnits().size(), 1F); + for (DataSourceSegment each : sqlStatement.getStorageUnits()) { + if (each instanceof HostnameAndPortBasedDataSourceSegment || each instanceof URLBasedDataSourceSegment) { + toBeCheckedSegments.put(each.getName(), each); + } + } + Collection<String> invalidStorageUnitNames = toBeCheckedSegments.entrySet().stream() .filter(each -> !isSameDatabase(each.getValue(), database.getResourceMetaData().getStorageUnits().get(each.getKey()))).map(Entry::getKey).collect(Collectors.toSet()); ShardingSpherePreconditions.checkState(invalidStorageUnitNames.isEmpty(), () -> new AlterStorageUnitConnectionInfoException(invalidStorageUnitNames)); } diff --git a/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/executor/rdl/resource/RegisterStorageUnitExecutor.java b/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/executor/rdl/resource/RegisterStorageUnitExecutor.java index f343abba791..5f2afb57d40 100644 --- a/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/executor/rdl/resource/RegisterStorageUnitExecutor.java +++ b/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/executor/rdl/resource/RegisterStorageUnitExecutor.java @@ -23,6 +23,8 @@ import org.apache.shardingsphere.distsql.handler.aware.DistSQLExecutorDatabaseAw import org.apache.shardingsphere.distsql.handler.engine.update.DistSQLUpdateExecutor; import 
org.apache.shardingsphere.distsql.handler.validate.DistSQLDataSourcePoolPropertiesValidator; import org.apache.shardingsphere.distsql.segment.DataSourceSegment; +import org.apache.shardingsphere.distsql.segment.HostnameAndPortBasedDataSourceSegment; +import org.apache.shardingsphere.distsql.segment.URLBasedDataSourceSegment; import org.apache.shardingsphere.distsql.segment.converter.DataSourceSegmentsConverter; import org.apache.shardingsphere.distsql.statement.rdl.resource.unit.type.RegisterStorageUnitStatement; import org.apache.shardingsphere.infra.database.core.checker.PrivilegeCheckType; @@ -33,6 +35,7 @@ import org.apache.shardingsphere.infra.exception.kernel.metadata.resource.storag import org.apache.shardingsphere.infra.exception.kernel.metadata.resource.storageunit.StorageUnitsOperateException; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.rule.attribute.datasource.DataSourceMapperRuleAttribute; +import org.apache.shardingsphere.infra.util.SphereEx; import org.apache.shardingsphere.mode.manager.ContextManager; import java.sql.SQLException; @@ -55,6 +58,9 @@ public final class RegisterStorageUnitExecutor implements DistSQLUpdateExecutor< @Override public void executeUpdate(final RegisterStorageUnitStatement sqlStatement, final ContextManager contextManager) { + // SPEX ADDED: BEGIN + checkSegmentType(sqlStatement); + // SPEX ADDED: END checkDataSource(sqlStatement, contextManager); Map<String, DataSourcePoolProperties> propsMap = DataSourceSegmentsConverter.convert(database.getProtocolType(), sqlStatement.getStorageUnits()); if (sqlStatement.isIfNotExists()) { @@ -74,6 +80,14 @@ public final class RegisterStorageUnitExecutor implements DistSQLUpdateExecutor< } } + @SphereEx + private void checkSegmentType(final RegisterStorageUnitStatement sqlStatement) { + for (DataSourceSegment each : sqlStatement.getStorageUnits()) { + ShardingSpherePreconditions.checkState(each instanceof 
HostnameAndPortBasedDataSourceSegment || each instanceof URLBasedDataSourceSegment, + () -> new UnsupportedOperationException(String.format("Missing connection information for register storage unit %s.", each.getName()))); + } + } + private void checkDataSource(final RegisterStorageUnitStatement sqlStatement, final ContextManager contextManager) { if (!sqlStatement.isIfNotExists()) { Collection<String> dataSourceNames = new ArrayList<>(sqlStatement.getStorageUnits().size()); diff --git a/infra/util/src/main/java/org/apache/shardingsphere/infra/util/SphereEx.java b/infra/util/src/main/java/org/apache/shardingsphere/infra/util/SphereEx.java new file mode 100644 index 00000000000..11f82922be5 --- /dev/null +++ b/infra/util/src/main/java/org/apache/shardingsphere/infra/util/SphereEx.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.util; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * SphereEx commercial annotation. 
+ */ +@Target({ElementType.TYPE, ElementType.CONSTRUCTOR, ElementType.METHOD, ElementType.FIELD, ElementType.PARAMETER, ElementType.LOCAL_VARIABLE}) +@Retention(RetentionPolicy.SOURCE) +public @interface SphereEx { + + /** + * Change type. + * + * @return change type + */ + Type value() default Type.ADD; + + enum Type { + ADD, MODIFY + } +} diff --git a/mode/core/pom.xml b/mode/core/pom.xml index e5d3cb69c6e..e0c6d8c7f9c 100644 --- a/mode/core/pom.xml +++ b/mode/core/pom.xml @@ -42,6 +42,11 @@ <artifactId>shardingsphere-metadata-core</artifactId> <version>${project.version}</version> </dependency> + <dependency> + <groupId>com.zaxxer</groupId> + <artifactId>HikariCP</artifactId> + <scope>compile</scope> + </dependency> <dependency> <groupId>org.apache.shardingsphere</groupId> diff --git a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/context/ConfigurationContextManager.java b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/context/ConfigurationContextManager.java index 8f281988218..a4c5db3a570 100644 --- a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/context/ConfigurationContextManager.java +++ b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/context/ConfigurationContextManager.java @@ -17,6 +17,8 @@ package org.apache.shardingsphere.mode.manager.context; +import com.zaxxer.hikari.HikariConfigMXBean; +import com.zaxxer.hikari.HikariDataSource; import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; @@ -25,7 +27,10 @@ import org.apache.shardingsphere.infra.config.database.impl.DataSourceProvidedDa import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; import org.apache.shardingsphere.infra.config.rule.scope.DatabaseRuleConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolDestroyer; import 
org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.synonym.ConnectionPropertySynonyms; +import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.instance.InstanceContext; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; @@ -38,6 +43,7 @@ import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSp import org.apache.shardingsphere.infra.rule.ShardingSphereRule; import org.apache.shardingsphere.infra.rule.builder.database.DatabaseRulesBuilder; import org.apache.shardingsphere.infra.rule.builder.global.GlobalRulesBuilder; +import org.apache.shardingsphere.infra.util.SphereEx; import org.apache.shardingsphere.infra.yaml.config.swapper.rule.YamlDataNodeGlobalRuleConfigurationSwapper; import org.apache.shardingsphere.infra.yaml.config.swapper.rule.YamlDataNodeGlobalRuleConfigurationSwapperEngine; import org.apache.shardingsphere.metadata.factory.ExternalMetaDataFactory; @@ -100,12 +106,109 @@ public final class ConfigurationContextManager { closeStaleRules(databaseName); SwitchingResource switchingResource = new ResourceSwitchManager().alterStorageUnit(metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData(), propsMap); + // SPEX ADDED: BEGIN + ResourceMetaData resourceMetaData = metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData(); + if (isPoolPropsChangedOnly(resourceMetaData, propsMap)) { + alterDataSourcePoolPros(resourceMetaData, switchingResource, propsMap); + closeNewDataSources(switchingResource); + return; + } + // SPEX ADDED: END buildNewMetaDataContext(databaseName, switchingResource, false); } catch (final SQLException ex) { log.error("Alter database: {} register storage unit failed", databaseName, ex); } } + 
@SphereEx + private boolean isPoolPropsChangedOnly(final ResourceMetaData resourceMetaData, final Map<String, DataSourcePoolProperties> propsMap) { + for (Entry<String, DataSourcePoolProperties> entry : propsMap.entrySet()) { + if (!resourceMetaData.getStorageUnits().containsKey(entry.getKey())) { + continue; + } + ConnectionPropertySynonyms currentConnectionProps = resourceMetaData.getStorageUnits().get(entry.getKey()).getDataSourcePoolProperties().getConnectionPropertySynonyms(); + ConnectionPropertySynonyms newConnectionProps = entry.getValue().getConnectionPropertySynonyms(); + if (!currentConnectionProps.getStandardProperties().get("url").equals(newConnectionProps.getStandardProperties().get("url"))) { + return false; + } + if (!currentConnectionProps.getStandardProperties().get("username").equals(newConnectionProps.getStandardProperties().get("username"))) { + return false; + } + if (!currentConnectionProps.getStandardProperties().get("password").equals(newConnectionProps.getStandardProperties().get("password"))) { + return false; + } + } + return true; + } + + @SphereEx + private void alterDataSourcePoolPros(final ResourceMetaData resourceMetaData, final SwitchingResource switchingResource, final Map<String, DataSourcePoolProperties> propsMap) { + for (Entry<String, DataSourcePoolProperties> entry : propsMap.entrySet()) { + StorageUnit storageUnit = resourceMetaData.getStorageUnits().get(entry.getKey()); + if (null == storageUnit) { + continue; + } + if (!switchingResource.getNewDataSources().containsKey(storageUnit.getStorageNode())) { + continue; + } + DataSource newDataSource = switchingResource.getNewDataSources().get(storageUnit.getStorageNode()); + ShardingSpherePreconditions.checkState(newDataSource instanceof HikariDataSource, + () -> new UnsupportedOperationException("The new data source must be HikariDataSource when alter pool properties.")); + if (!((HikariDataSource) 
newDataSource).getJdbcUrl().equals(entry.getValue().getConnectionPropertySynonyms().getStandardProperties().get("url"))) { + continue; + } + DataSource staleDataSource = switchingResource.getStaleDataSources().get(storageUnit.getStorageNode()); + if (null == staleDataSource) { + continue; + } + ShardingSpherePreconditions.checkState(staleDataSource instanceof HikariDataSource, + () -> new UnsupportedOperationException("The stale data source must be HikariDataSource when alter pool properties.")); + HikariConfigMXBean configMXBean = ((HikariDataSource) staleDataSource).getHikariConfigMXBean(); + HikariConfigMXBean newConfigMXBean = ((HikariDataSource) newDataSource).getHikariConfigMXBean(); + if (newConfigMXBean.getMaximumPoolSize() != configMXBean.getMaximumPoolSize()) { + log.warn("Update maxPoolSize of pool `{}` from {} to {}", configMXBean.getPoolName(), configMXBean.getMaximumPoolSize(), newConfigMXBean.getMaximumPoolSize()); + configMXBean.setMaximumPoolSize(newConfigMXBean.getMaximumPoolSize()); + } + if (newConfigMXBean.getMinimumIdle() != configMXBean.getMinimumIdle()) { + log.warn("Update minIdle of pool `{}` from {} to {}", configMXBean.getPoolName(), configMXBean.getMinimumIdle(), newConfigMXBean.getMinimumIdle()); + configMXBean.setMinimumIdle(newConfigMXBean.getMinimumIdle()); + } + if (newConfigMXBean.getConnectionTimeout() != configMXBean.getConnectionTimeout()) { + log.warn("Update connectionTimeoutMs of pool `{}` from {} to {}", configMXBean.getPoolName(), configMXBean.getConnectionTimeout(), newConfigMXBean.getConnectionTimeout()); + configMXBean.setConnectionTimeout(newConfigMXBean.getConnectionTimeout()); + } + if (newConfigMXBean.getIdleTimeout() != configMXBean.getIdleTimeout()) { + log.warn("Update idleTimeoutMs of pool `{}` from {} to {}", configMXBean.getPoolName(), configMXBean.getIdleTimeout(), newConfigMXBean.getIdleTimeout()); + configMXBean.setIdleTimeout(newConfigMXBean.getIdleTimeout()); + } + if (newConfigMXBean.getMaxLifetime() != 
configMXBean.getMaxLifetime()) { + log.warn("Update maxLifetimeMs of pool `{}` from {} to {}", configMXBean.getPoolName(), configMXBean.getMaxLifetime(), newConfigMXBean.getMaxLifetime()); + configMXBean.setMaxLifetime(newConfigMXBean.getMaxLifetime()); + } + } + for (Entry<String, StorageUnit> entry : resourceMetaData.getStorageUnits().entrySet()) { + DataSourcePoolProperties newDataSourceProperties = switchingResource.getMergedDataSourcePoolPropertiesMap().get(entry.getKey()); + if (null != newDataSourceProperties) { + alterDataSourceProperties(entry.getValue(), newDataSourceProperties); + } + } + } + + @SphereEx + private void alterDataSourceProperties(final StorageUnit storageUnit, final DataSourcePoolProperties newDataSourceProperties) { + storageUnit.getDataSourcePoolProperties().getPoolPropertySynonyms().getStandardProperties().clear(); + storageUnit.getDataSourcePoolProperties().getPoolPropertySynonyms().getStandardProperties().putAll(newDataSourceProperties.getPoolPropertySynonyms().getStandardProperties()); + } + + @SphereEx + private void closeNewDataSources(final SwitchingResource switchingResource) { + for (Entry<StorageNode, DataSource> entry : switchingResource.getNewDataSources().entrySet()) { + if (null != entry.getValue() && switchingResource.getStaleDataSources().containsKey(entry.getKey())) { + new DataSourcePoolDestroyer(entry.getValue()).asyncDestroy(); + } + } + } + /** * UnRegister storage unit. * diff --git a/parser/distsql/engine/src/main/antlr4/imports/RDLStatement.g4 b/parser/distsql/engine/src/main/antlr4/imports/RDLStatement.g4 index 5dfb4f7a0bd..f65a26f1e3f 100644 --- a/parser/distsql/engine/src/main/antlr4/imports/RDLStatement.g4 +++ b/parser/distsql/engine/src/main/antlr4/imports/RDLStatement.g4 @@ -36,7 +36,11 @@ storageUnitsDefinition ; storageUnitDefinition - : storageUnitName LP_ (simpleSource | urlSource) COMMA_ USER EQ_ user (COMMA_ PASSWORD EQ_ password)? (COMMA_ propertiesDefinition)? 
RP_ + : storageUnitName LP_ storageUnitConnectionDefinition? (COMMA_? propertiesDefinition)? RP_ + ; + +storageUnitConnectionDefinition + : (simpleSource | urlSource) COMMA_ USER EQ_ user (COMMA_ PASSWORD EQ_ password)? ; simpleSource diff --git a/parser/distsql/engine/src/main/java/org/apache/shardingsphere/distsql/parser/core/kernel/KernelDistSQLStatementVisitor.java b/parser/distsql/engine/src/main/java/org/apache/shardingsphere/distsql/parser/core/kernel/KernelDistSQLStatementVisitor.java index 67f09b2f851..26e3b029b2b 100644 --- a/parser/distsql/engine/src/main/java/org/apache/shardingsphere/distsql/parser/core/kernel/KernelDistSQLStatementVisitor.java +++ b/parser/distsql/engine/src/main/java/org/apache/shardingsphere/distsql/parser/core/kernel/KernelDistSQLStatementVisitor.java @@ -58,12 +58,14 @@ import org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementPa import org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementParser.ShowRulesUsedStorageUnitContext; import org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementParser.ShowStorageUnitsContext; import org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementParser.ShowTableMetadataContext; +import org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementParser.StorageUnitConnectionDefinitionContext; import org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementParser.StorageUnitDefinitionContext; import org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementParser.StorageUnitsDefinitionContext; import org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementParser.UnlabelComputeNodeContext; import org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementParser.UnlockClusterContext; import org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementParser.UnregisterStorageUnitContext; import org.apache.shardingsphere.distsql.segment.AlgorithmSegment; 
+import org.apache.shardingsphere.distsql.segment.AlterPoolPropertiesSegment; import org.apache.shardingsphere.distsql.segment.DataSourceSegment; import org.apache.shardingsphere.distsql.segment.HostnameAndPortBasedDataSourceSegment; import org.apache.shardingsphere.distsql.segment.URLBasedDataSourceSegment; @@ -95,6 +97,8 @@ import org.apache.shardingsphere.distsql.statement.rdl.resource.unit.type.Unregi import org.apache.shardingsphere.distsql.statement.rql.resource.ShowLogicalTablesStatement; import org.apache.shardingsphere.distsql.statement.rql.resource.ShowStorageUnitsStatement; import org.apache.shardingsphere.distsql.statement.rql.rule.database.ShowRulesUsedStorageUnitStatement; +import org.apache.shardingsphere.infra.util.SphereEx; +import org.apache.shardingsphere.infra.util.SphereEx.Type; import org.apache.shardingsphere.sql.parser.api.ASTNode; import org.apache.shardingsphere.sql.parser.api.visitor.SQLVisitor; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.DatabaseSegment; @@ -136,15 +140,20 @@ public final class KernelDistSQLStatementVisitor extends KernelDistSQLStatementB return new ShowTableMetaDataStatement(tableNames, null == ctx.databaseName() ? null : (DatabaseSegment) visit(ctx.databaseName())); } + @SphereEx(Type.MODIFY) @Override public ASTNode visitStorageUnitDefinition(final StorageUnitDefinitionContext ctx) { - String user = getIdentifierValue(ctx.user()); - String password = null == ctx.password() ? "" : getPassword(ctx.password()); Properties props = getProperties(ctx.propertiesDefinition()); - return null == ctx.urlSource() + if (null == ctx.storageUnitConnectionDefinition()) { + return new AlterPoolPropertiesSegment(getIdentifierValue(ctx.storageUnitName()), props); + } + StorageUnitConnectionDefinitionContext connectionCtx = ctx.storageUnitConnectionDefinition(); + String user = getIdentifierValue(connectionCtx.user()); + String password = null == connectionCtx.password() ? 
"" : getPassword(connectionCtx.password()); + return null == connectionCtx.urlSource() ? new HostnameAndPortBasedDataSourceSegment(getIdentifierValue(ctx.storageUnitName()), - getIdentifierValue(ctx.simpleSource().hostname()), ctx.simpleSource().port().getText(), getIdentifierValue(ctx.simpleSource().dbName()), user, password, props) - : new URLBasedDataSourceSegment(getIdentifierValue(ctx.storageUnitName()), getIdentifierValue(ctx.urlSource().url()), user, password, props); + getIdentifierValue(connectionCtx.simpleSource().hostname()), connectionCtx.simpleSource().port().getText(), getIdentifierValue(connectionCtx.simpleSource().dbName()), user, password, props) + : new URLBasedDataSourceSegment(getIdentifierValue(ctx.storageUnitName()), getIdentifierValue(connectionCtx.urlSource().url()), user, password, props); } private String getPassword(final PasswordContext ctx) { diff --git a/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/segment/AlterPoolPropertiesSegment.java b/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/segment/AlterPoolPropertiesSegment.java new file mode 100644 index 00000000000..935c61179be --- /dev/null +++ b/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/segment/AlterPoolPropertiesSegment.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.distsql.segment; + +import lombok.Getter; +import org.apache.shardingsphere.infra.util.SphereEx; + +import java.util.Properties; + +/** + * Alter pool properties segment. + */ +@SphereEx +@Getter +public final class AlterPoolPropertiesSegment extends DataSourceSegment { + + public AlterPoolPropertiesSegment(final String name, final Properties props) { + super(name, null, null, props); + } +} diff --git a/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/segment/converter/DataSourceSegmentsConverter.java b/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/segment/converter/DataSourceSegmentsConverter.java index 56e1a702b8c..b7e03665063 100644 --- a/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/segment/converter/DataSourceSegmentsConverter.java +++ b/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/segment/converter/DataSourceSegmentsConverter.java @@ -19,19 +19,28 @@ package org.apache.shardingsphere.distsql.segment.converter; import lombok.AccessLevel; import lombok.NoArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.apache.shardingsphere.distsql.segment.AlterPoolPropertiesSegment; import org.apache.shardingsphere.distsql.segment.DataSourceSegment; import org.apache.shardingsphere.distsql.segment.HostnameAndPortBasedDataSourceSegment; import org.apache.shardingsphere.distsql.segment.URLBasedDataSourceSegment; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import 
org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; +import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnit; +import org.apache.shardingsphere.infra.util.SphereEx; import java.util.Collection; +import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; +import java.util.Map.Entry; +import java.util.Properties; /** * Data source segments converter. */ +@Slf4j @NoArgsConstructor(access = AccessLevel.PRIVATE) public final class DataSourceSegmentsConverter { @@ -49,6 +58,81 @@ public final class DataSourceSegmentsConverter { } return result; } + /** + * Convert data source segments to data source properties map. + * + * @param databaseType database type + * @param resourceMetaData resource meta data + * @param dataSourceSegments data source segments + * @return data source properties map + */ + @SphereEx + public static Map<String, DataSourcePoolProperties> convert(final DatabaseType databaseType, final ResourceMetaData resourceMetaData, + final Collection<DataSourceSegment> dataSourceSegments) { + Map<String, DataSourcePoolProperties> result = new LinkedHashMap<>(dataSourceSegments.size(), 1); + for (DataSourceSegment each : dataSourceSegments) { + if (each instanceof AlterPoolPropertiesSegment) { + result.put(each.getName(), convertForAlterPoolProps(resourceMetaData, (AlterPoolPropertiesSegment) each)); + } else { + result.put(each.getName(), new DataSourcePoolProperties("com.zaxxer.hikari.HikariDataSource", createProperties(databaseType, each))); + } + } + return result; + } + + @SphereEx + private static DataSourcePoolProperties convertForAlterPoolProps(final ResourceMetaData resourceMetaData, final AlterPoolPropertiesSegment segment) { + StorageUnit storageUnit = resourceMetaData.getStorageUnits().get(segment.getName()); + Map<String, Object> props = 
getAlteredPoolProps(getCurrentProps(storageUnit.getDataSourcePoolProperties().getAllStandardProperties()), segment); + return new DataSourcePoolProperties("com.zaxxer.hikari.HikariDataSource", props); + } + + @SphereEx + private static Map<String, Object> getCurrentProps(final Map<String, Object> currentProps) { + Map<String, Object> result = new HashMap<>(10, 1L); + for (Entry<String, Object> entry : currentProps.entrySet()) { + if (null != entry.getValue()) { + result.put(entry.getKey(), entry.getValue()); + } + } + return result; + } + + @SphereEx + private static Map<String, Object> getAlteredPoolProps(final Map<String, Object> props, final AlterPoolPropertiesSegment segment) { + Properties toBeAlteredProperties = segment.getProps(); + if (toBeAlteredProperties.isEmpty()) { + return props; + } + log.warn("Alter pool properties for storage unit {}, props: {}", segment.getName(), toBeAlteredProperties); + putPropsIfPresent(toBeAlteredProperties, props, "maxPoolSize"); + putPropsIfPresent(toBeAlteredProperties, props, "minPoolSize"); + putPropsIfPresent(toBeAlteredProperties, props, "connectionTimeoutMilliseconds"); + putPropsIfPresent(toBeAlteredProperties, props, "idleTimeoutMilliseconds"); + putPropsIfPresent(toBeAlteredProperties, props, "maxLifetimeMilliseconds"); + return props; + } + + @SphereEx + private static Map<String, String> getPropertySynonyms() { + Map<String, String> result = new HashMap<>(5, 1F); + result.put("connectionTimeoutMilliseconds", "connectionTimeout"); + result.put("idleTimeoutMilliseconds", "idleTimeout"); + result.put("maxLifetimeMilliseconds", "maxLifetime"); + result.put("maxPoolSize", "maximumPoolSize"); + result.put("minPoolSize", "minimumIdle"); + return result; + } + + @SphereEx + private static void putPropsIfPresent(final Properties toBeAlteredProperties, final Map<String, Object> props, final String key) { + Map<String, String> propertySynonyms = getPropertySynonyms(); + if (toBeAlteredProperties.containsKey(key)) { + 
props.put(key, toBeAlteredProperties.getProperty(key)); + } else if (toBeAlteredProperties.containsKey(propertySynonyms.get(key))) { + props.put(key, toBeAlteredProperties.get(propertySynonyms.get(key))); + } + } @SuppressWarnings({"unchecked", "rawtypes"}) private static Map<String, Object> createProperties(final DatabaseType databaseType, final DataSourceSegment segment) {
