From 0d62a4b20f78a028882b6e31b8d990473cfd1269 Mon Sep 17 00:00:00 2001 From: Liang Zhang Date: Fri, 11 Aug 2023 20:04:08 +0800 Subject: [PATCH] Rename DataSourceProperties to DataSourcePoolProperties (#28041) --- .../database/DatabaseConfiguration.java | 8 +- ...aSourceGeneratedDatabaseConfiguration.java | 10 +- ...taSourceProvidedDatabaseConfiguration.java | 21 +- .../instance/mode/ModeContextManager.java | 10 +- .../database/ShardingSphereDatabase.java | 9 +- .../database/resource/ResourceMetaData.java | 18 +- .../resource/StorageUnitMetaData.java | 16 +- .../YamlDataSourceConfigurationSwapper.java | 32 +-- ...rceGeneratedDatabaseConfigurationTest.java | 6 +- ...urceProvidedDatabaseConfigurationTest.java | 6 +- ...lDataSourcePoolPropertiesSwapperTest.java} | 10 +- .../pool/creator/DataSourcePoolCreator.java | 82 ++++---- .../pool/creator/DataSourceReflection.java | 12 +- .../DataSourcePoolPropertiesValidator.java | 6 +- ...ies.java => DataSourcePoolProperties.java} | 48 ++--- ...a => DataSourcePoolPropertiesCreator.java} | 38 ++-- ...=> DataSourcePoolPropertiesValidator.java} | 28 +-- ...lidDataSourcePoolPropertiesException.java} | 6 +- ...va => CustomDataSourcePoolProperties.java} | 8 +- .../storage/StorageResourceUtils.java | 8 +- .../StorageResourceWithProperties.java | 8 +- .../creator/DataSourcePoolCreatorTest.java | 6 +- ... 
DataSourcePoolPropertiesCreatorTest.java} | 22 +- ...java => DataSourcePoolPropertiesTest.java} | 38 ++-- ...ataSourcePoolPropertiesValidatorTest.java} | 6 +- ...> CustomDataSourcePoolPropertiesTest.java} | 4 +- .../C3P0DataSourcePoolCreatorTest.java | 6 +- .../DBCPDataSourcePoolCreatorTest.java | 6 +- infra/datasource/type/druid/pom.xml | 54 ----- .../DruidDataSourcePoolFieldMetaData.java | 36 ---- .../metadata/DruidDataSourcePoolMetaData.java | 71 ------- ...ource.pool.metadata.DataSourcePoolMetaData | 18 -- .../DruidDataSourcePoolCreatorTest.java | 53 ----- ...kariDataSourcePoolPropertiesValidator.java | 46 ++--- .../HikariDataSourcePoolCreatorTest.java | 6 +- infra/datasource/type/pom.xml | 1 - ...aSourcePoolPropertiesValidateHandler.java} | 14 +- .../DriverDatabaseConnectionManager.java | 26 +-- .../DriverDatabaseConnectionManagerTest.java | 4 +- ...SpherePipelineDataSourceConfiguration.java | 4 +- ...andardPipelineDataSourceConfiguration.java | 8 +- ...rdPipelineDataSourceConfigurationTest.java | 12 +- .../StandardPipelineDataSourceCreator.java | 4 +- .../PipelineDataSourcePersistService.java | 16 +- .../preparer/PipelineJobPreparerUtils.java | 6 +- .../yaml/YamlJdbcConfigurationTest.java | 4 +- ...sterMigrationSourceStorageUnitUpdater.java | 12 +- .../data/pipeline/cdc/api/impl/CDCJobAPI.java | 10 +- .../migration/api/impl/MigrationJobAPI.java | 44 ++-- .../persist/MetaDataBasedPersistService.java | 4 +- .../persist/MetaDataPersistService.java | 28 +-- .../persist/NewMetaDataPersistService.java | 31 ++- .../database/DatabaseBasedPersistService.java | 6 +- .../DataSourceNodePersistService.java | 28 +-- .../DataSourceUnitPersistService.java | 28 +-- .../NewDataSourceNodePersistService.java | 32 +-- .../NewDataSourceUnitPersistService.java | 32 +-- .../DataSourceUnitPersistServiceTest.java | 18 +- .../database/DatabaseTimestampService.java | 2 +- .../datasource/swapper/DataSourceSwapper.java | 4 +- .../mode/manager/ContextManager.java | 6 +- 
.../context/ConfigurationContextManager.java | 24 +-- .../switcher/NewResourceSwitchManager.java | 30 +-- .../switcher/ResourceSwitchManager.java | 79 ++++--- .../manager/switcher/SwitchingResource.java | 5 - .../mode/manager/ContextManagerTest.java | 4 +- .../switcher/ResourceSwitchManagerTest.java | 22 +- .../switcher/SwitchingResourceTest.java | 2 +- .../cluster/ClusterModeContextManager.java | 16 +- .../cluster/NewClusterModeContextManager.java | 22 +- .../DataSourceNodesChangedEvent.java | 4 +- .../DataSourceUnitsChangedEvent.java | 4 +- .../watcher/MetaDataChangedWatcher.java | 14 +- .../ConfigurationChangedSubscriber.java | 2 +- .../ClusterModeContextManagerTest.java | 193 ++++++++---------- .../ConfigurationChangedSubscriberTest.java | 16 +- ...ResourceMetaDataChangedSubscriberTest.java | 16 +- .../NewStandaloneModeContextManager.java | 38 ++-- .../StandaloneModeContextManager.java | 36 ++-- .../DataSourceSegmentsConverter.java | 8 +- .../DataSourceSegmentsConverterTest.java | 4 +- .../YamlProxyConfigurationSwapper.java | 4 +- .../ConvertYamlConfigurationExecutor.java | 22 +- .../queryable/ExportStorageNodesExecutor.java | 4 +- .../unit/AlterStorageUnitBackendHandler.java | 18 +- .../RegisterStorageUnitBackendHandler.java | 20 +- .../storage/unit/ShowStorageUnitExecutor.java | 47 +++-- .../proxy/backend/util/ExportUtils.java | 12 +- ...mlDatabaseConfigurationImportExecutor.java | 18 +- ...portDatabaseConfigurationExecutorTest.java | 6 +- .../queryable/ExportMetaDataExecutorTest.java | 6 +- ...mportDatabaseConfigurationUpdaterTest.java | 4 +- .../AlterStorageUnitBackendHandlerTest.java | 4 +- ...RegisterStorageUnitBackendHandlerTest.java | 4 +- .../hbase/config/YamlHBaseConfiguration.java | 2 +- .../api/impl/MigrationJobAPITest.java | 12 +- 96 files changed, 791 insertions(+), 1077 deletions(-) rename infra/common/src/test/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/{YamlDataSourcePropertiesSwapperTest.java => 
YamlDataSourcePoolPropertiesSwapperTest.java} (93%) rename infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/{DataSourceProperties.java => DataSourcePoolProperties.java} (74%) rename infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/{DataSourcePropertiesCreator.java => DataSourcePoolPropertiesCreator.java} (85%) rename infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/{DataSourcePropertiesValidator.java => DataSourcePoolPropertiesValidator.java} (74%) rename infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/{InvalidDataSourcePropertiesException.java => InvalidDataSourcePoolPropertiesException.java} (81%) rename infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/custom/{CustomDataSourceProperties.java => CustomDataSourcePoolProperties.java} (86%) rename infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/{DataSourcePropertiesCreatorTest.java => DataSourcePoolPropertiesCreatorTest.java} (83%) rename infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/{DataSourcePropertiesTest.java => DataSourcePoolPropertiesTest.java} (71%) rename infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/{DataSourcePropertiesValidatorTest.java => DataSourcePoolPropertiesValidatorTest.java} (86%) rename infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/custom/{CustomDataSourcePropertiesTest.java => CustomDataSourcePoolPropertiesTest.java} (95%) delete mode 100644 infra/datasource/type/druid/pom.xml delete mode 100644 infra/datasource/type/druid/src/main/java/org/apache/shardingsphere/infra/datasource/druid/metadata/DruidDataSourcePoolFieldMetaData.java delete mode 100644 
infra/datasource/type/druid/src/main/java/org/apache/shardingsphere/infra/datasource/druid/metadata/DruidDataSourcePoolMetaData.java delete mode 100644 infra/datasource/type/druid/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData delete mode 100644 infra/datasource/type/druid/src/test/java/org/apache/shardingsphere/infra/datasource/druid/creator/DruidDataSourcePoolCreatorTest.java rename infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/validate/{DataSourcePropertiesValidateHandler.java => DataSourcePoolPropertiesValidateHandler.java} (77%) diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/DatabaseConfiguration.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/DatabaseConfiguration.java index 87600de7f7b10..224bc0b9240b7 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/DatabaseConfiguration.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/DatabaseConfiguration.java @@ -18,7 +18,7 @@ package org.apache.shardingsphere.infra.config.database; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.datasource.storage.StorageResource; import javax.sql.DataSource; @@ -52,9 +52,9 @@ public interface DatabaseConfiguration { StorageResource getStorageResource(); /** - * Get data source props map. + * Get data source pool properties map. 
* - * @return data source props map + * @return data source pool properties map */ - Map getDataSourcePropsMap(); + Map getDataSourcePoolPropertiesMap(); } diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceGeneratedDatabaseConfiguration.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceGeneratedDatabaseConfiguration.java index 5f39c1f5c0909..5430af7112769 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceGeneratedDatabaseConfiguration.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceGeneratedDatabaseConfiguration.java @@ -22,8 +22,8 @@ import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.datasource.storage.StorageResource; import javax.sql.DataSource; @@ -40,12 +40,12 @@ public final class DataSourceGeneratedDatabaseConfiguration implements DatabaseC private final Collection ruleConfigurations; - private final Map dataSourcePropsMap; + private final Map dataSourcePoolPropertiesMap; public DataSourceGeneratedDatabaseConfiguration(final Map dataSourceConfigs, final Collection ruleConfigs) { ruleConfigurations = ruleConfigs; - dataSourcePropsMap = DataSourcePropertiesCreator.createFromConfiguration(dataSourceConfigs); - this.storageResource = 
DataSourcePoolCreator.createStorageResource(dataSourcePropsMap); + dataSourcePoolPropertiesMap = DataSourcePoolPropertiesCreator.createFromConfiguration(dataSourceConfigs); + this.storageResource = DataSourcePoolCreator.createStorageResource(dataSourcePoolPropertiesMap); } @Override diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceProvidedDatabaseConfiguration.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceProvidedDatabaseConfiguration.java index f3a92b9cc05e2..c0c6d558c3b12 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceProvidedDatabaseConfiguration.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceProvidedDatabaseConfiguration.java @@ -18,10 +18,11 @@ package org.apache.shardingsphere.infra.config.database.impl; import lombok.Getter; +import lombok.RequiredArgsConstructor; import org.apache.shardingsphere.infra.config.database.DatabaseConfiguration; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.datasource.storage.StorageResource; import org.apache.shardingsphere.infra.datasource.storage.StorageResourceUtils; @@ -35,6 +36,7 @@ /** * Data source provided database configuration. 
*/ +@RequiredArgsConstructor @Getter public final class DataSourceProvidedDatabaseConfiguration implements DatabaseConfiguration { @@ -42,24 +44,17 @@ public final class DataSourceProvidedDatabaseConfiguration implements DatabaseCo private final Collection ruleConfigurations; - private final Map dataSourcePropsMap; + private final Map dataSourcePoolPropertiesMap; public DataSourceProvidedDatabaseConfiguration(final Map dataSources, final Collection ruleConfigurations) { this.ruleConfigurations = ruleConfigurations; this.storageResource = new StorageResource(StorageResourceUtils.getStorageNodeDataSources(dataSources), StorageResourceUtils.getStorageUnitNodeMappers(dataSources)); - dataSourcePropsMap = createDataSourcePropertiesMap(dataSources); + dataSourcePoolPropertiesMap = createDataSourcePoolPropertiesMap(dataSources); } - public DataSourceProvidedDatabaseConfiguration(final StorageResource storageResource, final Collection ruleConfigurations, - final Map dataSourcePropsMap) { - this.ruleConfigurations = ruleConfigurations; - this.storageResource = storageResource; - this.dataSourcePropsMap = dataSourcePropsMap; - } - - private Map createDataSourcePropertiesMap(final Map dataSources) { + private Map createDataSourcePoolPropertiesMap(final Map dataSources) { return dataSources.entrySet().stream().collect(Collectors - .toMap(Entry::getKey, entry -> DataSourcePropertiesCreator.create(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); + .toMap(Entry::getKey, entry -> DataSourcePoolPropertiesCreator.create(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); } @Override diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/instance/mode/ModeContextManager.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/instance/mode/ModeContextManager.java index d2066f8d3fd02..4ab64d8c105a5 100644 --- 
a/infra/common/src/main/java/org/apache/shardingsphere/infra/instance/mode/ModeContextManager.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/instance/mode/ModeContextManager.java @@ -18,7 +18,7 @@ package org.apache.shardingsphere.infra.instance.mode; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.metadata.database.schema.pojo.AlterSchemaMetaDataPOJO; import org.apache.shardingsphere.infra.metadata.database.schema.pojo.AlterSchemaPOJO; import org.apache.shardingsphere.infra.metadata.version.MetaDataVersion; @@ -82,19 +82,19 @@ public interface ModeContextManager { * Register storage units. * * @param databaseName database name - * @param toBeRegisterStorageUnitProps to be register storage unit props + * @param toBeRegisteredProps to be registered storage unit properties * @throws SQLException SQL exception */ - void registerStorageUnits(String databaseName, Map toBeRegisterStorageUnitProps) throws SQLException; + void registerStorageUnits(String databaseName, Map toBeRegisteredProps) throws SQLException; /** * Alter storage units. * * @param databaseName database name - * @param toBeUpdatedStorageUnitProps to be updated storage unit props + * @param toBeUpdatedProps to be updated storage unit properties * @throws SQLException SQL exception */ - void alterStorageUnits(String databaseName, Map toBeUpdatedStorageUnitProps) throws SQLException; + void alterStorageUnits(String databaseName, Map toBeUpdatedProps) throws SQLException; /** * Unregister storage units. 
diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/ShardingSphereDatabase.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/ShardingSphereDatabase.java index 30bdba1599b52..87f65921be4b1 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/ShardingSphereDatabase.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/ShardingSphereDatabase.java @@ -24,7 +24,7 @@ import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.datasource.storage.StorageResource; import org.apache.shardingsphere.infra.instance.InstanceContext; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; @@ -118,14 +118,13 @@ public static ShardingSphereDatabase create(final String name, final DatabaseTyp */ public static ShardingSphereDatabase create(final String name, final DatabaseType protocolType, final DatabaseConfiguration databaseConfig, final Collection rules, final Map schemas) { - ResourceMetaData resourceMetaData = createResourceMetaData(name, databaseConfig.getStorageResource(), databaseConfig.getDataSourcePropsMap()); + ResourceMetaData resourceMetaData = createResourceMetaData(name, databaseConfig.getStorageResource(), databaseConfig.getDataSourcePoolPropertiesMap()); RuleMetaData ruleMetaData = new RuleMetaData(rules); return new ShardingSphereDatabase(name, protocolType, resourceMetaData, ruleMetaData, schemas); } - private static ResourceMetaData createResourceMetaData(final String databaseName, final StorageResource storageResource, - final 
Map dataSourcePropsMap) { - return new ResourceMetaData(databaseName, storageResource, dataSourcePropsMap); + private static ResourceMetaData createResourceMetaData(final String databaseName, final StorageResource storageResource, final Map propsMap) { + return new ResourceMetaData(databaseName, storageResource, propsMap); } /** diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/ResourceMetaData.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/ResourceMetaData.java index fc6e62918a337..a4fd7a9f57e9b 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/ResourceMetaData.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/ResourceMetaData.java @@ -21,8 +21,8 @@ import org.apache.shardingsphere.infra.database.core.connector.ConnectionProperties; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolDestroyer; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.datasource.storage.StorageNode; import org.apache.shardingsphere.infra.datasource.storage.StorageResource; import org.apache.shardingsphere.infra.datasource.storage.StorageResourceUtils; @@ -51,12 +51,12 @@ public ResourceMetaData(final Map dataSources) { public ResourceMetaData(final String databaseName, final Map dataSources) { storageNodeDataSources = StorageResourceUtils.getStorageNodeDataSources(dataSources); storageUnitMetaData = new StorageUnitMetaData(databaseName, storageNodeDataSources, - 
DataSourcePropertiesCreator.create(dataSources), StorageResourceUtils.getStorageUnitNodeMappers(dataSources)); + DataSourcePoolPropertiesCreator.create(dataSources), StorageResourceUtils.getStorageUnitNodeMappers(dataSources)); } - public ResourceMetaData(final String databaseName, final StorageResource storageResource, final Map dataSourcePropsMap) { + public ResourceMetaData(final String databaseName, final StorageResource storageResource, final Map propsMap) { storageNodeDataSources = storageResource.getStorageNodeDataSources(); - storageUnitMetaData = new StorageUnitMetaData(databaseName, storageNodeDataSources, dataSourcePropsMap, storageResource.getStorageUnitNodeMappers()); + storageUnitMetaData = new StorageUnitMetaData(databaseName, storageNodeDataSources, propsMap, storageResource.getStorageUnitNodeMappers()); } /** @@ -84,7 +84,7 @@ public Map getStorageTypes() { */ public Collection getAllInstanceDataSourceNames() { Collection result = new LinkedList<>(); - for (Entry entry : storageUnitMetaData.getConnectionPropsMap().entrySet()) { + for (Entry entry : storageUnitMetaData.getConnectionPropertiesMap().entrySet()) { if (!isExisted(entry.getKey(), result)) { result.add(entry.getKey()); } @@ -93,8 +93,8 @@ public Collection getAllInstanceDataSourceNames() { } private boolean isExisted(final String dataSourceName, final Collection existedDataSourceNames) { - return existedDataSourceNames.stream().anyMatch(each -> storageUnitMetaData.getConnectionPropsMap().get(dataSourceName) - .isInSameDatabaseInstance(storageUnitMetaData.getConnectionPropsMap().get(each))); + return existedDataSourceNames.stream().anyMatch(each -> storageUnitMetaData.getConnectionPropertiesMap().get(dataSourceName) + .isInSameDatabaseInstance(storageUnitMetaData.getConnectionPropertiesMap().get(each))); } /** @@ -104,7 +104,7 @@ private boolean isExisted(final String dataSourceName, final Collection * @return connection properties */ public ConnectionProperties 
getConnectionProperties(final String dataSourceName) { - return storageUnitMetaData.getConnectionPropsMap().get(dataSourceName); + return storageUnitMetaData.getConnectionPropertiesMap().get(dataSourceName); } /** diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/StorageUnitMetaData.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/StorageUnitMetaData.java index d833e196276cd..03727407e841d 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/StorageUnitMetaData.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/StorageUnitMetaData.java @@ -24,8 +24,8 @@ import org.apache.shardingsphere.infra.database.core.spi.DatabaseTypedSPILoader; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.datasource.CatalogSwitchableDataSource; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.datasource.storage.StorageNode; import org.apache.shardingsphere.infra.datasource.storage.StorageUnitNodeMapper; import org.apache.shardingsphere.infra.state.datasource.DataSourceStateManager; @@ -45,22 +45,22 @@ public final class StorageUnitMetaData { private final Map dataSources; - private final Map dataSourcePropsMap; + private final Map dataSourcePoolPropertiesMap; private final Map storageTypes; private final Map unitNodeMappers; - private final Map connectionPropsMap; + private final Map connectionPropertiesMap; public StorageUnitMetaData(final String databaseName, final Map storageNodeDataSources, - final Map 
dataSourcePropsMap, final Map unitNodeMappers) { + final Map dataSourcePoolPropertiesMap, final Map unitNodeMappers) { this.unitNodeMappers = unitNodeMappers; this.dataSources = getStorageUnitDataSources(storageNodeDataSources, unitNodeMappers); - this.dataSourcePropsMap = dataSourcePropsMap; + this.dataSourcePoolPropertiesMap = dataSourcePoolPropertiesMap; Map enabledStorageNodeDataSources = getEnabledStorageNodeDataSources(databaseName, storageNodeDataSources); storageTypes = createStorageTypes(enabledStorageNodeDataSources, unitNodeMappers); - connectionPropsMap = createConnectionPropertiesMap(enabledStorageNodeDataSources, storageTypes, unitNodeMappers); + connectionPropertiesMap = createConnectionPropertiesMap(enabledStorageNodeDataSources, storageTypes, unitNodeMappers); } private Map getEnabledStorageNodeDataSources(final String databaseName, final Map storageNodeDataSources) { @@ -97,7 +97,7 @@ private Map createConnectionPropertiesMap(final Ma Map result = new LinkedHashMap<>(unitNodeMappers.size(), 1F); for (Entry entry : unitNodeMappers.entrySet()) { if (enabledStorageNodeDataSources.containsKey(entry.getValue().getStorageNode())) { - Map standardProps = DataSourcePropertiesCreator.create(enabledStorageNodeDataSources.get(entry.getValue().getStorageNode())) + Map standardProps = DataSourcePoolPropertiesCreator.create(enabledStorageNodeDataSources.get(entry.getValue().getStorageNode())) .getConnectionPropertySynonyms().getStandardProperties(); DatabaseType storageType = storageTypes.get(entry.getKey()); ConnectionPropertiesParser parser = DatabaseTypedSPILoader.getService(ConnectionPropertiesParser.class, storageType); diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/YamlDataSourceConfigurationSwapper.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/YamlDataSourceConfigurationSwapper.java index 53535e3ba1715..6537b2c2b8d63 100644 --- 
a/infra/common/src/main/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/YamlDataSourceConfigurationSwapper.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/YamlDataSourceConfigurationSwapper.java @@ -19,7 +19,7 @@ import com.google.common.base.Preconditions; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.yaml.config.pojo.YamlRootConfiguration; import javax.sql.DataSource; @@ -56,31 +56,31 @@ public Map swapToDataSources(final Map swapToDataSources(final Map> yamlDataSources, final boolean cacheEnabled) { - return DataSourcePoolCreator.create(yamlDataSources.entrySet().stream().collect(Collectors.toMap(Entry::getKey, entry -> swapToDataSourceProperties(entry.getValue()))), cacheEnabled); + return DataSourcePoolCreator.create(yamlDataSources.entrySet().stream().collect(Collectors.toMap(Entry::getKey, entry -> swapToDataSourcePoolProperties(entry.getValue()))), cacheEnabled); } /** - * Get data source properties. + * Get data source pool properties. 
* - * @param yamlRootConfig yaml root configuration - * @return data source name to data source properties map + * @param yamlRootConfig YAML root configuration + * @return data source name to data source pool properties map */ - public Map getDataSourcePropertiesMap(final YamlRootConfiguration yamlRootConfig) { + public Map getDataSourcePoolPropertiesMap(final YamlRootConfiguration yamlRootConfig) { Map> yamlDataSourceConfigs = yamlRootConfig.getDataSources(); - Map result = new LinkedHashMap<>(yamlDataSourceConfigs.size(), 1F); - yamlDataSourceConfigs.forEach((key, value) -> result.put(key, swapToDataSourceProperties(value))); + Map result = new LinkedHashMap<>(yamlDataSourceConfigs.size(), 1F); + yamlDataSourceConfigs.forEach((key, value) -> result.put(key, swapToDataSourcePoolProperties(value))); return result; } /** - * Swap to data source properties. + * Swap to data source pool properties. * * @param yamlConfig YAML configurations - * @return data source properties + * @return data source pool properties */ - public DataSourceProperties swapToDataSourceProperties(final Map yamlConfig) { + public DataSourcePoolProperties swapToDataSourcePoolProperties(final Map yamlConfig) { Preconditions.checkState(yamlConfig.containsKey(DATA_SOURCE_CLASS_NAME_KEY), "%s can not be null.", DATA_SOURCE_CLASS_NAME_KEY); - return new DataSourceProperties(yamlConfig.get(DATA_SOURCE_CLASS_NAME_KEY).toString(), getProperties(yamlConfig)); + return new DataSourcePoolProperties(yamlConfig.get(DATA_SOURCE_CLASS_NAME_KEY).toString(), getProperties(yamlConfig)); } @SuppressWarnings({"rawtypes", "unchecked"}) @@ -99,12 +99,12 @@ private Map getProperties(final Map yamlConfig) /** * Swap to map from data source properties. 
* - * @param dataSourceProps data source properties + * @param props data source pool properties * @return data source map */ - public Map swapToMap(final DataSourceProperties dataSourceProps) { - Map result = new HashMap<>(dataSourceProps.getAllStandardProperties()); - result.put(DATA_SOURCE_CLASS_NAME_KEY, dataSourceProps.getDataSourceClassName()); + public Map swapToMap(final DataSourcePoolProperties props) { + Map result = new HashMap<>(props.getAllStandardProperties()); + result.put(DATA_SOURCE_CLASS_NAME_KEY, props.getPoolClassName()); return result; } } diff --git a/infra/common/src/test/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceGeneratedDatabaseConfigurationTest.java b/infra/common/src/test/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceGeneratedDatabaseConfigurationTest.java index cba32437cf822..3d06e1ae732e0 100644 --- a/infra/common/src/test/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceGeneratedDatabaseConfigurationTest.java +++ b/infra/common/src/test/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceGeneratedDatabaseConfigurationTest.java @@ -22,7 +22,7 @@ import org.apache.shardingsphere.infra.datasource.pool.config.ConnectionConfiguration; import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; import org.apache.shardingsphere.infra.datasource.pool.config.PoolConfiguration; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.datasource.storage.StorageNode; import org.apache.shardingsphere.infra.fixture.FixtureRuleConfiguration; import org.junit.jupiter.api.Test; @@ -72,9 +72,9 @@ void assertGetRuleConfigurations() { } @Test - void assertGetDataSourceProperties() { + void assertGetDataSourcePoolProperties() { DataSourceGeneratedDatabaseConfiguration databaseConfig = 
createDataSourceGeneratedDatabaseConfiguration(); - DataSourceProperties props = databaseConfig.getDataSourcePropsMap().get("normal_db"); + DataSourcePoolProperties props = databaseConfig.getDataSourcePoolPropertiesMap().get("normal_db"); Map poolStandardProps = props.getPoolPropertySynonyms().getStandardProperties(); assertThat(poolStandardProps.size(), is(6)); assertThat(poolStandardProps.get("connectionTimeoutMilliseconds"), is(2000L)); diff --git a/infra/common/src/test/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceProvidedDatabaseConfigurationTest.java b/infra/common/src/test/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceProvidedDatabaseConfigurationTest.java index 635a45601ce02..b2f8d98adbb47 100644 --- a/infra/common/src/test/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceProvidedDatabaseConfigurationTest.java +++ b/infra/common/src/test/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceProvidedDatabaseConfigurationTest.java @@ -18,7 +18,7 @@ package org.apache.shardingsphere.infra.config.database.impl; import org.apache.shardingsphere.infra.datasource.CatalogSwitchableDataSource; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.datasource.storage.StorageNode; import org.apache.shardingsphere.infra.fixture.FixtureRuleConfiguration; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; @@ -67,9 +67,9 @@ void assertGetRuleConfigurations() { } @Test - void assertGetDataSourceProperties() { + void assertGetDataSourcePoolProperties() { DataSourceProvidedDatabaseConfiguration databaseConfig = createDataSourceProvidedDatabaseConfiguration(); - DataSourceProperties props = databaseConfig.getDataSourcePropsMap().get("foo_ds"); + DataSourcePoolProperties props = 
databaseConfig.getDataSourcePoolPropertiesMap().get("foo_ds"); Map poolStandardProps = props.getPoolPropertySynonyms().getStandardProperties(); assertThat(poolStandardProps.size(), is(0)); Map connStandardProps = props.getConnectionPropertySynonyms().getStandardProperties(); diff --git a/infra/common/src/test/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/YamlDataSourcePropertiesSwapperTest.java b/infra/common/src/test/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/YamlDataSourcePoolPropertiesSwapperTest.java similarity index 93% rename from infra/common/src/test/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/YamlDataSourcePropertiesSwapperTest.java rename to infra/common/src/test/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/YamlDataSourcePoolPropertiesSwapperTest.java index f603ab0323151..9225c638657ee 100644 --- a/infra/common/src/test/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/YamlDataSourcePropertiesSwapperTest.java +++ b/infra/common/src/test/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/YamlDataSourcePoolPropertiesSwapperTest.java @@ -17,7 +17,7 @@ package org.apache.shardingsphere.infra.yaml.config.swapper.resource; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; import org.junit.jupiter.api.Test; @@ -29,7 +29,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -class YamlDataSourcePropertiesSwapperTest { +class YamlDataSourcePoolPropertiesSwapperTest { private final YamlDataSourceConfigurationSwapper swapper = new YamlDataSourceConfigurationSwapper(); @@ -48,12 +48,12 @@ void assertSwapToDataSources() { } @Test - void assertSwapToDataSourceProperties() { + void assertSwapToDataSourcePoolProperties() 
{ Map yamlConfig = new HashMap<>(3, 1F); yamlConfig.put("dataSourceClassName", MockedDataSource.class.getName()); yamlConfig.put("url", "xx:xxx"); yamlConfig.put("username", "root"); - DataSourceProperties actual = swapper.swapToDataSourceProperties(yamlConfig); + DataSourcePoolProperties actual = swapper.swapToDataSourcePoolProperties(yamlConfig); assertThat(actual.getAllLocalProperties().size(), is(3)); assertThat(actual.getAllLocalProperties().get("dataSourceClassName").toString(), is("org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource")); assertThat(actual.getAllLocalProperties().get("url").toString(), is("xx:xxx")); @@ -62,7 +62,7 @@ void assertSwapToDataSourceProperties() { @Test void assertSwapToMap() { - Map actual = swapper.swapToMap(new DataSourceProperties(MockedDataSource.class.getName(), createProperties())); + Map actual = swapper.swapToMap(new DataSourcePoolProperties(MockedDataSource.class.getName(), createProperties())); assertThat(actual.get("dataSourceClassName"), is(MockedDataSource.class.getName())); assertThat(actual.get("url").toString(), is("xx:xxx")); assertThat(actual.get("username").toString(), is("root")); diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolCreator.java b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolCreator.java index 989d82b7f678b..b9871266a08c3 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolCreator.java +++ b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolCreator.java @@ -31,8 +31,8 @@ import org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolDestroyer; import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData; import 
org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaDataReflection; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.pool.props.custom.CustomDataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.custom.CustomDataSourcePoolProperties; import org.apache.shardingsphere.infra.datasource.storage.StorageNode; import org.apache.shardingsphere.infra.datasource.storage.StorageNodeProperties; import org.apache.shardingsphere.infra.datasource.storage.StorageResource; @@ -56,24 +56,24 @@ public final class DataSourcePoolCreator { /** * Create storage resource. * - * @param dataSourcePropsMap data source properties map + * @param propsMap data source pool properties map * @return created storage resource */ - public static StorageResource createStorageResource(final Map dataSourcePropsMap) { - return createStorageResource(dataSourcePropsMap, true); + public static StorageResource createStorageResource(final Map propsMap) { + return createStorageResource(propsMap, true); } /** * Create storage resource. 
* - * @param dataSourcePropsMap data source properties map + * @param propsMap data source pool properties map * @param cacheEnabled cache enabled * @return created storage resource */ - public static StorageResource createStorageResource(final Map dataSourcePropsMap, final boolean cacheEnabled) { + public static StorageResource createStorageResource(final Map propsMap, final boolean cacheEnabled) { Map storageNodes = new LinkedHashMap<>(); Map storageUnitNodeMappers = new LinkedHashMap<>(); - for (Entry entry : dataSourcePropsMap.entrySet()) { + for (Entry entry : propsMap.entrySet()) { StorageNodeProperties storageNodeProps = getStorageNodeProperties(entry.getKey(), entry.getValue()); StorageNode storageNode = new StorageNode(storageNodeProps.getName()); if (storageNodes.containsKey(storageNode)) { @@ -100,14 +100,14 @@ public static StorageResource createStorageResource(final Map dataSourcePropsMap) { + public static StorageResourceWithProperties createStorageResourceWithoutDataSource(final Map propsMap) { Map storageNodes = new LinkedHashMap<>(); Map storageUnitNodeMappers = new LinkedHashMap<>(); - Map dataSourcePropertiesMap = new LinkedHashMap<>(); - for (Entry entry : dataSourcePropsMap.entrySet()) { + Map newPropsMap = new LinkedHashMap<>(); + for (Entry entry : propsMap.entrySet()) { StorageNodeProperties storageNodeProperties = getStorageNodeProperties(entry.getKey(), entry.getValue()); StorageNode storageNode = new StorageNode(storageNodeProperties.getName()); if (storageNodes.containsKey(storageNode)) { @@ -116,14 +116,14 @@ public static StorageResourceWithProperties createStorageResourceWithoutDataSour } storageNodes.put(storageNode, null); appendStorageUnitNodeMapper(storageUnitNodeMappers, storageNodeProperties, entry.getKey(), entry.getValue()); - dataSourcePropertiesMap.put(storageNodeProperties.getName(), entry.getValue()); + newPropsMap.put(storageNodeProperties.getName(), entry.getValue()); } - return new 
StorageResourceWithProperties(storageNodes, storageUnitNodeMappers, dataSourcePropertiesMap); + return new StorageResourceWithProperties(storageNodes, storageUnitNodeMappers, newPropsMap); } private static void appendStorageUnitNodeMapper(final Map storageUnitNodeMappers, final StorageNodeProperties storageNodeProps, - final String unitName, final DataSourceProperties dataSourceProps) { - String url = dataSourceProps.getConnectionPropertySynonyms().getStandardProperties().get("url").toString(); + final String unitName, final DataSourcePoolProperties props) { + String url = props.getConnectionPropertySynonyms().getStandardProperties().get("url").toString(); storageUnitNodeMappers.put(unitName, getStorageUnitNodeMapper(storageNodeProps, unitName, url)); } @@ -134,7 +134,7 @@ private static StorageUnitNodeMapper getStorageUnitNodeMapper(final StorageNodeP : new StorageUnitNodeMapper(unitName, new StorageNode(storageNodeProps.getName()), url); } - private static StorageNodeProperties getStorageNodeProperties(final String dataSourceName, final DataSourceProperties storageNodeProps) { + private static StorageNodeProperties getStorageNodeProperties(final String dataSourceName, final DataSourcePoolProperties storageNodeProps) { Map standardProperties = storageNodeProps.getConnectionPropertySynonyms().getStandardProperties(); String url = standardProperties.get("url").toString(); String username = standardProperties.get("username").toString(); @@ -160,23 +160,23 @@ private static String generateStorageNodeName(final String hostname, final int p /** * Create data sources. * - * @param dataSourcePropsMap data source properties map + * @param propsMap data source pool properties map * @return created data sources */ - public static Map create(final Map dataSourcePropsMap) { - return create(dataSourcePropsMap, true); + public static Map create(final Map propsMap) { + return create(propsMap, true); } /** * Create data sources. 
* - * @param dataSourcePropsMap data source properties map + * @param propsMap data source pool properties map * @param cacheEnabled cache enabled * @return created data sources */ - public static Map create(final Map dataSourcePropsMap, final boolean cacheEnabled) { + public static Map create(final Map propsMap, final boolean cacheEnabled) { Map result = new LinkedHashMap<>(); - for (Entry entry : dataSourcePropsMap.entrySet()) { + for (Entry entry : propsMap.entrySet()) { DataSource dataSource; try { dataSource = create(entry.getKey(), entry.getValue(), cacheEnabled); @@ -196,20 +196,20 @@ public static Map create(final Map poolMetaData = TypedSPILoader.findService(DataSourcePoolMetaData.class, dataSourceProps.getDataSourceClassName()); + public static DataSource create(final DataSourcePoolProperties props) { + DataSource result = createDataSource(props.getPoolClassName()); + Optional poolMetaData = TypedSPILoader.findService(DataSourcePoolMetaData.class, props.getPoolClassName()); DataSourceReflection dataSourceReflection = new DataSourceReflection(result); if (poolMetaData.isPresent()) { setDefaultFields(dataSourceReflection, poolMetaData.get()); - setConfiguredFields(dataSourceProps, dataSourceReflection, poolMetaData.get()); - appendJdbcUrlProperties(dataSourceProps.getCustomDataSourceProperties(), result, poolMetaData.get(), dataSourceReflection); - dataSourceReflection.addDefaultDataSourceProperties(poolMetaData.get()); + setConfiguredFields(props, dataSourceReflection, poolMetaData.get()); + appendJdbcUrlProperties(props.getCustomDataSourcePoolProperties(), result, poolMetaData.get(), dataSourceReflection); + dataSourceReflection.addDefaultDataSourcePoolProperties(poolMetaData.get()); } else { - setConfiguredFields(dataSourceProps, dataSourceReflection); + setConfiguredFields(props, dataSourceReflection); } return result; } @@ -218,12 +218,12 @@ public static DataSource create(final DataSourceProperties dataSourceProps) { * Create data source. 
* * @param dataSourceName data source name - * @param dataSourceProps data source properties + * @param props data source pool properties * @param cacheEnabled cache enabled * @return created data source */ - public static DataSource create(final String dataSourceName, final DataSourceProperties dataSourceProps, final boolean cacheEnabled) { - DataSource result = create(dataSourceProps); + public static DataSource create(final String dataSourceName, final DataSourcePoolProperties props, final boolean cacheEnabled) { + DataSource result = create(props); if (cacheEnabled && !GlobalDataSourceRegistry.getInstance().getCachedDataSources().containsKey(dataSourceName)) { GlobalDataSourceRegistry.getInstance().getCachedDataSources().put(dataSourceName, result); } @@ -241,14 +241,14 @@ private static void setDefaultFields(final DataSourceReflection dataSourceReflec } } - private static void setConfiguredFields(final DataSourceProperties dataSourceProps, final DataSourceReflection dataSourceReflection) { - for (Entry entry : dataSourceProps.getAllLocalProperties().entrySet()) { + private static void setConfiguredFields(final DataSourcePoolProperties props, final DataSourceReflection dataSourceReflection) { + for (Entry entry : props.getAllLocalProperties().entrySet()) { dataSourceReflection.setField(entry.getKey(), entry.getValue()); } } - private static void setConfiguredFields(final DataSourceProperties dataSourceProps, final DataSourceReflection dataSourceReflection, final DataSourcePoolMetaData poolMetaData) { - for (Entry entry : dataSourceProps.getAllLocalProperties().entrySet()) { + private static void setConfiguredFields(final DataSourcePoolProperties props, final DataSourceReflection dataSourceReflection, final DataSourcePoolMetaData poolMetaData) { + for (Entry entry : props.getAllLocalProperties().entrySet()) { String fieldName = entry.getKey(); Object fieldValue = entry.getValue(); if (isValidProperty(fieldName, fieldValue, poolMetaData) && 
!fieldName.equals(poolMetaData.getFieldMetaData().getJdbcUrlPropertiesFieldName())) { @@ -262,11 +262,11 @@ private static boolean isValidProperty(final String key, final Object value, fin } @SuppressWarnings("unchecked") - private static void appendJdbcUrlProperties(final CustomDataSourceProperties customDataSourceProps, final DataSource targetDataSource, final DataSourcePoolMetaData poolMetaData, + private static void appendJdbcUrlProperties(final CustomDataSourcePoolProperties customPoolProps, final DataSource targetDataSource, final DataSourcePoolMetaData poolMetaData, final DataSourceReflection dataSourceReflection) { String jdbcUrlPropertiesFieldName = poolMetaData.getFieldMetaData().getJdbcUrlPropertiesFieldName(); - if (null != jdbcUrlPropertiesFieldName && customDataSourceProps.getProperties().containsKey(jdbcUrlPropertiesFieldName)) { - Map jdbcUrlProps = (Map) customDataSourceProps.getProperties().get(jdbcUrlPropertiesFieldName); + if (null != jdbcUrlPropertiesFieldName && customPoolProps.getProperties().containsKey(jdbcUrlPropertiesFieldName)) { + Map jdbcUrlProps = (Map) customPoolProps.getProperties().get(jdbcUrlPropertiesFieldName); DataSourcePoolMetaDataReflection dataSourcePoolMetaDataReflection = new DataSourcePoolMetaDataReflection(targetDataSource, poolMetaData.getFieldMetaData()); dataSourcePoolMetaDataReflection.getJdbcConnectionProperties().ifPresent(optional -> setJdbcUrlProperties(dataSourceReflection, optional, jdbcUrlProps, jdbcUrlPropertiesFieldName)); } diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourceReflection.java b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourceReflection.java index d74630d6b9486..113378111473e 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourceReflection.java +++ 
b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourceReflection.java @@ -160,11 +160,11 @@ private Optional findSetterMethod(final String fieldName) { } /** - * Add default data source properties. + * Add default data source pool properties. * - * @param dataSourcePoolMetaData data source pool meta data + * @param metaData data source pool meta data */ - public void addDefaultDataSourceProperties(final DataSourcePoolMetaData dataSourcePoolMetaData) { + public void addDefaultDataSourcePoolProperties(final DataSourcePoolMetaData metaData) { DataSourcePoolMetaDataReflection dataSourcePoolMetaDataReflection = new DataSourcePoolMetaDataReflection(dataSource, TypedSPILoader.findService(DataSourcePoolMetaData.class, dataSource.getClass().getName()) .map(DataSourcePoolMetaData::getFieldMetaData).orElseGet(DefaultDataSourcePoolFieldMetaData::new)); @@ -183,10 +183,10 @@ public void addDefaultDataSourceProperties(final DataSourcePoolMetaData dataSour jdbcProps.setProperty(defaultPropertyKey, defaultPropertyValue); } } - setField(dataSourcePoolMetaData.getFieldMetaData().getJdbcUrlPropertiesFieldName(), jdbcProps); + setField(metaData.getFieldMetaData().getJdbcUrlPropertiesFieldName(), jdbcProps); } - private boolean containsDefaultProperty(final String defaultPropertyKey, final Properties targetDataSourceProps, final Properties queryProps) { - return targetDataSourceProps.containsKey(defaultPropertyKey) || queryProps.containsKey(defaultPropertyKey); + private boolean containsDefaultProperty(final String defaultPropKey, final Properties targetProps, final Properties queryProps) { + return targetProps.containsKey(defaultPropKey) || queryProps.containsKey(defaultPropKey); } } diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolPropertiesValidator.java 
b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolPropertiesValidator.java index 2d107bc975aa7..8ca46f95c4a3f 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolPropertiesValidator.java +++ b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolPropertiesValidator.java @@ -17,7 +17,7 @@ package org.apache.shardingsphere.infra.datasource.pool.metadata; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; /** * Data source pool properties validator. @@ -27,7 +27,7 @@ public interface DataSourcePoolPropertiesValidator { /** * Validate data source properties. * - * @param dataSourceProps data source properties + * @param props data source pool properties */ - void validate(DataSourceProperties dataSourceProps); + void validate(DataSourcePoolProperties props); } diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourceProperties.java b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePoolProperties.java similarity index 74% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourceProperties.java rename to infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePoolProperties.java index c668f21388555..69507941cb81e 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourceProperties.java +++ b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePoolProperties.java @@ -20,9 +20,9 @@ import com.google.common.base.Objects; import lombok.Getter; import 
org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData; +import org.apache.shardingsphere.infra.datasource.pool.props.custom.CustomDataSourcePoolProperties; import org.apache.shardingsphere.infra.datasource.pool.props.synonym.ConnectionPropertySynonyms; import org.apache.shardingsphere.infra.datasource.pool.props.synonym.PoolPropertySynonyms; -import org.apache.shardingsphere.infra.datasource.pool.props.custom.CustomDataSourceProperties; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import java.util.Collection; @@ -34,28 +34,28 @@ import java.util.Optional; /** - * Data source properties. + * Data source pool properties. */ @Getter -public final class DataSourceProperties { +public final class DataSourcePoolProperties { private static final String DEFAULT_DATA_SOURCE_CLASS = "com.zaxxer.hikari.HikariDataSource"; - private final String dataSourceClassName; + private final String poolClassName; private final ConnectionPropertySynonyms connectionPropertySynonyms; private final PoolPropertySynonyms poolPropertySynonyms; - private final CustomDataSourceProperties customDataSourceProperties; + private final CustomDataSourcePoolProperties customDataSourcePoolProperties; - public DataSourceProperties(final String dataSourceClassName, final Map props) { - this.dataSourceClassName = dataSourceClassName; - Optional poolMetaData = TypedSPILoader.findService(DataSourcePoolMetaData.class, dataSourceClassName); + public DataSourcePoolProperties(final String poolClassName, final Map props) { + this.poolClassName = poolClassName; + Optional poolMetaData = TypedSPILoader.findService(DataSourcePoolMetaData.class, poolClassName); Map propertySynonyms = poolMetaData.isPresent() ? 
poolMetaData.get().getPropertySynonyms() : Collections.emptyMap(); connectionPropertySynonyms = new ConnectionPropertySynonyms(props, propertySynonyms); poolPropertySynonyms = new PoolPropertySynonyms(props, propertySynonyms); - customDataSourceProperties = new CustomDataSourceProperties( + customDataSourcePoolProperties = new CustomDataSourcePoolProperties( props, getStandardPropertyKeys(), poolMetaData.isPresent() ? poolMetaData.get().getTransientFieldNames() : Collections.emptyList(), propertySynonyms); } @@ -66,12 +66,12 @@ private Collection getStandardPropertyKeys() { } /** - * Get data source class name. + * Get data source pool class name. * - * @return data source class name + * @return data source pool class name */ - public String getDataSourceClassName() { - return null == dataSourceClassName ? DEFAULT_DATA_SOURCE_CLASS : dataSourceClassName; + public String getPoolClassName() { + return null == poolClassName ? DEFAULT_DATA_SOURCE_CLASS : poolClassName; } /** @@ -81,10 +81,10 @@ public String getDataSourceClassName() { */ public Map getAllStandardProperties() { Map result = new LinkedHashMap<>( - connectionPropertySynonyms.getStandardProperties().size() + poolPropertySynonyms.getStandardProperties().size() + customDataSourceProperties.getProperties().size(), 1F); + connectionPropertySynonyms.getStandardProperties().size() + poolPropertySynonyms.getStandardProperties().size() + customDataSourcePoolProperties.getProperties().size(), 1F); result.putAll(connectionPropertySynonyms.getStandardProperties()); result.putAll(poolPropertySynonyms.getStandardProperties()); - result.putAll(customDataSourceProperties.getProperties()); + result.putAll(customDataSourcePoolProperties.getProperties()); return result; } @@ -95,30 +95,30 @@ public Map getAllStandardProperties() { */ public Map getAllLocalProperties() { Map result = new LinkedHashMap<>( - connectionPropertySynonyms.getLocalProperties().size() + poolPropertySynonyms.getLocalProperties().size() + 
customDataSourceProperties.getProperties().size(), 1F); + connectionPropertySynonyms.getLocalProperties().size() + poolPropertySynonyms.getLocalProperties().size() + customDataSourcePoolProperties.getProperties().size(), 1F); result.putAll(connectionPropertySynonyms.getLocalProperties()); result.putAll(poolPropertySynonyms.getLocalProperties()); - result.putAll(customDataSourceProperties.getProperties()); + result.putAll(customDataSourcePoolProperties.getProperties()); return result; } @Override public boolean equals(final Object obj) { - return this == obj || null != obj && getClass() == obj.getClass() && equalsByProperties((DataSourceProperties) obj); + return this == obj || null != obj && getClass() == obj.getClass() && equalsByProperties((DataSourcePoolProperties) obj); } - private boolean equalsByProperties(final DataSourceProperties dataSourceProps) { - if (!dataSourceClassName.equals(dataSourceProps.dataSourceClassName)) { + private boolean equalsByProperties(final DataSourcePoolProperties props) { + if (!poolClassName.equals(props.poolClassName)) { return false; } for (Entry entry : getAllLocalProperties().entrySet()) { - if (!dataSourceProps.getAllLocalProperties().containsKey(entry.getKey())) { + if (!props.getAllLocalProperties().containsKey(entry.getKey())) { continue; } if (entry.getValue() instanceof Map) { - return entry.getValue().equals(dataSourceProps.getAllLocalProperties().get(entry.getKey())); + return entry.getValue().equals(props.getAllLocalProperties().get(entry.getKey())); } - if (!String.valueOf(entry.getValue()).equals(String.valueOf(dataSourceProps.getAllLocalProperties().get(entry.getKey())))) { + if (!String.valueOf(entry.getValue()).equals(String.valueOf(props.getAllLocalProperties().get(entry.getKey())))) { return false; } } @@ -131,6 +131,6 @@ public int hashCode() { for (Entry entry : getAllLocalProperties().entrySet()) { stringBuilder.append(entry.getKey()).append(entry.getValue()); } - return Objects.hashCode(dataSourceClassName, 
stringBuilder.toString()); + return Objects.hashCode(poolClassName, stringBuilder.toString()); } } diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePropertiesCreator.java b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePoolPropertiesCreator.java similarity index 85% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePropertiesCreator.java rename to infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePoolPropertiesCreator.java index ca6ca1500a389..a3a3af4f07319 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePropertiesCreator.java +++ b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePoolPropertiesCreator.java @@ -25,7 +25,7 @@ import org.apache.shardingsphere.infra.datasource.CatalogSwitchableDataSource; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourceReflection; import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData; -import org.apache.shardingsphere.infra.datasource.pool.props.custom.CustomDataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.custom.CustomDataSourcePoolProperties; import org.apache.shardingsphere.infra.datasource.pool.props.synonym.ConnectionPropertySynonyms; import org.apache.shardingsphere.infra.datasource.pool.props.synonym.PoolPropertySynonyms; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; @@ -39,18 +39,18 @@ import java.util.stream.Collectors; /** - * Data source properties creator. + * Data source pool properties creator. 
*/ @NoArgsConstructor(access = AccessLevel.PRIVATE) -public final class DataSourcePropertiesCreator { +public final class DataSourcePoolPropertiesCreator { /** - * Create data source properties. + * Create data source pool properties. * * @param dataSourceConfigs data source configurations - * @return created data source properties + * @return created data source pool properties */ - public static Map createFromConfiguration(final Map dataSourceConfigs) { + public static Map createFromConfiguration(final Map dataSourceConfigs) { return dataSourceConfigs.entrySet().stream().collect(Collectors .toMap(Entry::getKey, entry -> create(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); } @@ -61,8 +61,8 @@ public static Map createFromConfiguration(final Ma * @param dataSourceConfig data source configuration * @return created data source properties */ - public static DataSourceProperties create(final DataSourceConfiguration dataSourceConfig) { - return new DataSourceProperties(dataSourceConfig.getConnection().getDataSourceClassName(), createProperties(dataSourceConfig)); + public static DataSourcePoolProperties create(final DataSourceConfiguration dataSourceConfig) { + return new DataSourcePoolProperties(dataSourceConfig.getConnection().getDataSourceClassName(), createProperties(dataSourceConfig)); } /** @@ -71,8 +71,8 @@ public static DataSourceProperties create(final DataSourceConfiguration dataSour * @param dataSources data sources * @return created data source properties */ - public static Map create(final Map dataSources) { - Map result = new LinkedHashMap<>(); + public static Map create(final Map dataSources) { + Map result = new LinkedHashMap<>(); for (Entry entry : dataSources.entrySet()) { result.put(entry.getKey(), create(entry.getValue())); } @@ -85,11 +85,11 @@ public static Map create(final Map standardProperties = poolPropertySynonyms.getStandardProperties(); Long connectionTimeoutMilliseconds = toLong(standardProperties, 
"connectionTimeoutMilliseconds", null); Long idleTimeoutMilliseconds = toLong(standardProperties, "idleTimeoutMilliseconds", null); @@ -154,7 +154,7 @@ private static PoolConfiguration getPoolConfiguration(final PoolPropertySynonyms Integer minPoolSize = toInt(standardProperties, "minPoolSize", null); Boolean readOnly = toBoolean(standardProperties, "readOnly", null); Properties customProperties = new Properties(); - customProperties.putAll(customDataSourceProperties.getProperties()); + customProperties.putAll(customDataSourcePoolProperties.getProperties()); return new PoolConfiguration(connectionTimeoutMilliseconds, idleTimeoutMilliseconds, maxLifetimeMilliseconds, maxPoolSize, minPoolSize, readOnly, customProperties); } diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePropertiesValidator.java b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePoolPropertiesValidator.java similarity index 74% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePropertiesValidator.java rename to infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePoolPropertiesValidator.java index 0906f5c196cfa..3983e73af1e48 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePropertiesValidator.java +++ b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePoolPropertiesValidator.java @@ -32,50 +32,50 @@ import java.util.Optional; /** - * Data source properties validator. + * Data source pool properties validator. */ -public final class DataSourcePropertiesValidator { +public final class DataSourcePoolPropertiesValidator { /** - * Validate data source properties map. + * Validate data source pool properties map. 
* - * @param dataSourcePropsMap data source properties map + * @param propsMap data source pool properties map * @return error messages */ - public Collection validate(final Map dataSourcePropsMap) { + public Collection validate(final Map propsMap) { Collection result = new LinkedList<>(); - for (Entry entry : dataSourcePropsMap.entrySet()) { + for (Entry entry : propsMap.entrySet()) { try { validateProperties(entry.getKey(), entry.getValue()); validateConnection(entry.getKey(), entry.getValue()); - } catch (final InvalidDataSourcePropertiesException ex) { + } catch (final InvalidDataSourcePoolPropertiesException ex) { result.add(ex.getMessage()); } } return result; } - private void validateProperties(final String dataSourceName, final DataSourceProperties dataSourceProps) throws InvalidDataSourcePropertiesException { - Optional metaData = TypedSPILoader.findService(DataSourcePoolMetaData.class, dataSourceProps.getDataSourceClassName()); + private void validateProperties(final String dataSourceName, final DataSourcePoolProperties props) throws InvalidDataSourcePoolPropertiesException { + Optional metaData = TypedSPILoader.findService(DataSourcePoolMetaData.class, props.getPoolClassName()); if (!metaData.isPresent()) { return; } try { - metaData.get().getDataSourcePoolPropertiesValidator().ifPresent(optional -> optional.validate(dataSourceProps)); + metaData.get().getDataSourcePoolPropertiesValidator().ifPresent(optional -> optional.validate(props)); } catch (final IllegalArgumentException ex) { - throw new InvalidDataSourcePropertiesException(dataSourceName, ex.getMessage()); + throw new InvalidDataSourcePoolPropertiesException(dataSourceName, ex.getMessage()); } } - private void validateConnection(final String dataSourceName, final DataSourceProperties dataSourceProps) throws InvalidDataSourcePropertiesException { + private void validateConnection(final String dataSourceName, final DataSourcePoolProperties props) throws InvalidDataSourcePoolPropertiesException { 
DataSource dataSource = null; try { - dataSource = DataSourcePoolCreator.create(dataSourceProps); + dataSource = DataSourcePoolCreator.create(props); checkFailFast(dataSource); // CHECKSTYLE:OFF } catch (final SQLException | RuntimeException ex) { // CHECKSTYLE:ON - throw new InvalidDataSourcePropertiesException(dataSourceName, ex.getMessage()); + throw new InvalidDataSourcePoolPropertiesException(dataSourceName, ex.getMessage()); } finally { if (null != dataSource) { new DataSourcePoolDestroyer(dataSource).asyncDestroy(); diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/InvalidDataSourcePropertiesException.java b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/InvalidDataSourcePoolPropertiesException.java similarity index 81% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/InvalidDataSourcePropertiesException.java rename to infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/InvalidDataSourcePoolPropertiesException.java index 996657612d4bc..08ca9083a9455 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/InvalidDataSourcePropertiesException.java +++ b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/InvalidDataSourcePoolPropertiesException.java @@ -18,13 +18,13 @@ package org.apache.shardingsphere.infra.datasource.pool.props; /** - * Invalid data source properties exception. + * Invalid data source pool properties exception. 
*/ -public final class InvalidDataSourcePropertiesException extends Exception { +public final class InvalidDataSourcePoolPropertiesException extends Exception { private static final long serialVersionUID = -7221138369057943935L; - public InvalidDataSourcePropertiesException(final String dataSourceName, final String errorMessage) { + public InvalidDataSourcePoolPropertiesException(final String dataSourceName, final String errorMessage) { super(String.format("Invalid data source `%s`, error message is: %s", dataSourceName, errorMessage)); } } diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/custom/CustomDataSourceProperties.java b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/custom/CustomDataSourcePoolProperties.java similarity index 86% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/custom/CustomDataSourceProperties.java rename to infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/custom/CustomDataSourcePoolProperties.java index bb7327420eea5..ea44677b0f2ec 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/custom/CustomDataSourceProperties.java +++ b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/custom/CustomDataSourcePoolProperties.java @@ -27,16 +27,16 @@ import java.util.Properties; /** - * Custom data source properties. + * Custom data source pool properties. 
*/ @Getter @EqualsAndHashCode -public final class CustomDataSourceProperties { +public final class CustomDataSourcePoolProperties { private final Map properties; - public CustomDataSourceProperties(final Map props, - final Collection standardPropertyKeys, final Collection transientFieldNames, final Map propertySynonyms) { + public CustomDataSourcePoolProperties(final Map props, + final Collection standardPropertyKeys, final Collection transientFieldNames, final Map propertySynonyms) { properties = getProperties(props); standardPropertyKeys.forEach(properties::remove); transientFieldNames.forEach(properties::remove); diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageResourceUtils.java b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageResourceUtils.java index 21bd6b72aba79..acd8dc887bc9d 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageResourceUtils.java +++ b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageResourceUtils.java @@ -19,8 +19,8 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesCreator; import javax.sql.DataSource; import java.util.LinkedHashMap; @@ -56,8 +56,8 @@ public static Map getStorageNodeDataSources(final Map getStorageUnitNodeMappers(final Map dataSources) { Map result = new LinkedHashMap<>(dataSources.size(), 1F); for (Entry entry : dataSources.entrySet()) { - DataSourceProperties dataSourceProperties = DataSourcePropertiesCreator.create(entry.getValue()); - String url = 
dataSourceProperties.getConnectionPropertySynonyms().getStandardProperties().get("url").toString(); + DataSourcePoolProperties dataSourcePoolProperties = DataSourcePoolPropertiesCreator.create(entry.getValue()); + String url = dataSourcePoolProperties.getConnectionPropertySynonyms().getStandardProperties().get("url").toString(); result.put(entry.getKey(), new StorageUnitNodeMapper(entry.getKey(), new StorageNode(entry.getKey()), url)); } return result; diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageResourceWithProperties.java b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageResourceWithProperties.java index fc93e7e4d5c36..41121069ba419 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageResourceWithProperties.java +++ b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageResourceWithProperties.java @@ -18,7 +18,7 @@ package org.apache.shardingsphere.infra.datasource.storage; import lombok.Getter; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import javax.sql.DataSource; import java.util.Map; @@ -29,11 +29,11 @@ @Getter public final class StorageResourceWithProperties extends StorageResource { - private final Map dataSourcePropertiesMap; + private final Map dataSourcePoolPropertiesMap; public StorageResourceWithProperties(final Map storageNodes, - final Map storageUnitNodeMappers, final Map dataSourcePropertiesMap) { + final Map storageUnitNodeMappers, final Map dataSourcePoolPropertiesMap) { super(storageNodes, storageUnitNodeMappers); - this.dataSourcePropertiesMap = dataSourcePropertiesMap; + this.dataSourcePoolPropertiesMap = dataSourcePoolPropertiesMap; } } diff --git 
a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolCreatorTest.java b/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolCreatorTest.java index 6a3591c155b95..69e649884d30f 100644 --- a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolCreatorTest.java +++ b/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolCreatorTest.java @@ -17,7 +17,7 @@ package org.apache.shardingsphere.infra.datasource.pool.creator; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; import org.junit.jupiter.api.Test; @@ -34,14 +34,14 @@ class DataSourcePoolCreatorTest { @Test void assertCreateMap() { - Map actual = DataSourcePoolCreator.create(Collections.singletonMap("foo_ds", new DataSourceProperties(MockedDataSource.class.getName(), createProperties()))); + Map actual = DataSourcePoolCreator.create(Collections.singletonMap("foo_ds", new DataSourcePoolProperties(MockedDataSource.class.getName(), createProperties()))); assertThat(actual.size(), is(1)); assertDataSource((MockedDataSource) actual.get("foo_ds")); } @Test void assertCreate() { - assertDataSource((MockedDataSource) DataSourcePoolCreator.create(new DataSourceProperties(MockedDataSource.class.getName(), createProperties()))); + assertDataSource((MockedDataSource) DataSourcePoolCreator.create(new DataSourcePoolProperties(MockedDataSource.class.getName(), createProperties()))); } private Map createProperties() { diff --git a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePropertiesCreatorTest.java 
b/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePoolPropertiesCreatorTest.java similarity index 83% rename from infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePropertiesCreatorTest.java rename to infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePoolPropertiesCreatorTest.java index 91f56adc8a19a..06a84b3783991 100644 --- a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePropertiesCreatorTest.java +++ b/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePoolPropertiesCreatorTest.java @@ -20,7 +20,7 @@ import org.apache.shardingsphere.infra.datasource.pool.config.ConnectionConfiguration; import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; import org.apache.shardingsphere.infra.datasource.pool.config.PoolConfiguration; -import org.apache.shardingsphere.infra.datasource.pool.props.custom.CustomDataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.custom.CustomDataSourcePoolProperties; import org.apache.shardingsphere.infra.datasource.pool.props.synonym.ConnectionPropertySynonyms; import org.apache.shardingsphere.infra.datasource.pool.props.synonym.PoolPropertySynonyms; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; @@ -39,11 +39,11 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -class DataSourcePropertiesCreatorTest { +class DataSourcePoolPropertiesCreatorTest { @Test void assertCreateWithDataSourceConfiguration() { - assertParameter(DataSourcePropertiesCreator.create(createResourceConfiguration())); + assertParameter(DataSourcePoolPropertiesCreator.create(createResourceConfiguration())); } private DataSourceConfiguration createResourceConfiguration() { @@ -52,7 +52,7 @@ private 
DataSourceConfiguration createResourceConfiguration() { return new DataSourceConfiguration(connectionConfig, poolConfig); } - private void assertParameter(final DataSourceProperties actual) { + private void assertParameter(final DataSourcePoolProperties actual) { Map props = actual.getAllLocalProperties(); assertThat(props.size(), is(10)); assertThat(props.get("dataSourceClassName"), is("com.zaxxer.hikari.HikariDataSource")); @@ -68,20 +68,20 @@ private void assertParameter(final DataSourceProperties actual) { @Test void assertCreateWithDataSource() { - assertThat(DataSourcePropertiesCreator.create(createDataSource()), is(new DataSourceProperties(MockedDataSource.class.getName(), createProperties()))); + assertThat(DataSourcePoolPropertiesCreator.create(createDataSource()), is(new DataSourcePoolProperties(MockedDataSource.class.getName(), createProperties()))); } @Test void assertCreateConfiguration() { - DataSourceProperties dataSourceProperties = mock(DataSourceProperties.class); + DataSourcePoolProperties dataSourcePoolProperties = mock(DataSourcePoolProperties.class); ConnectionPropertySynonyms connectionPropertySynonyms = new ConnectionPropertySynonyms(createStandardProperties(), createPropertySynonyms()); PoolPropertySynonyms poolPropertySynonyms = new PoolPropertySynonyms(createStandardProperties(), createPropertySynonyms()); - CustomDataSourceProperties customDataSourceProperties = new CustomDataSourceProperties(createProperties(), + CustomDataSourcePoolProperties customDataSourcePoolProperties = new CustomDataSourcePoolProperties(createProperties(), Arrays.asList("username", "password", "closed"), Collections.singletonList("closed"), Collections.singletonMap("username", "user")); - when(dataSourceProperties.getConnectionPropertySynonyms()).thenReturn(connectionPropertySynonyms); - when(dataSourceProperties.getPoolPropertySynonyms()).thenReturn(poolPropertySynonyms); - 
when(dataSourceProperties.getCustomDataSourceProperties()).thenReturn(customDataSourceProperties); - DataSourcePropertiesCreator.createConfiguration(dataSourceProperties); + when(dataSourcePoolProperties.getConnectionPropertySynonyms()).thenReturn(connectionPropertySynonyms); + when(dataSourcePoolProperties.getPoolPropertySynonyms()).thenReturn(poolPropertySynonyms); + when(dataSourcePoolProperties.getCustomDataSourcePoolProperties()).thenReturn(customDataSourcePoolProperties); + DataSourcePoolPropertiesCreator.createConfiguration(dataSourcePoolProperties); } private DataSource createDataSource() { diff --git a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePropertiesTest.java b/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePoolPropertiesTest.java similarity index 71% rename from infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePropertiesTest.java rename to infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePoolPropertiesTest.java index add130f76750f..5aa5ba21f5700 100644 --- a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePropertiesTest.java +++ b/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePoolPropertiesTest.java @@ -35,7 +35,7 @@ import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; -class DataSourcePropertiesTest { +class DataSourcePoolPropertiesTest { @SuppressWarnings("unchecked") @Test @@ -46,8 +46,8 @@ void assertGetDataSourceConfigurationWithConnectionInitSqls() { actualDataSource.setUsername("root"); actualDataSource.setPassword("root"); actualDataSource.setConnectionInitSqls(Arrays.asList("set names utf8mb4;", "set names utf8;")); - DataSourceProperties actual = 
DataSourcePropertiesCreator.create(actualDataSource); - assertThat(actual.getDataSourceClassName(), is(MockedDataSource.class.getName())); + DataSourcePoolProperties actual = DataSourcePoolPropertiesCreator.create(actualDataSource); + assertThat(actual.getPoolClassName(), is(MockedDataSource.class.getName())); assertThat(actual.getAllLocalProperties().get("url").toString(), is("jdbc:mock://127.0.0.1/foo_ds")); assertThat(actual.getAllLocalProperties().get("username").toString(), is("root")); assertThat(actual.getAllLocalProperties().get("password").toString(), is("root")); @@ -60,8 +60,8 @@ void assertGetDataSourceConfigurationWithConnectionInitSqls() { @Test void assertGetAllLocalProperties() { - DataSourceProperties originalDataSourceProps = new DataSourceProperties(MockedDataSource.class.getName(), getProperties()); - Map actualAllProps = originalDataSourceProps.getAllLocalProperties(); + DataSourcePoolProperties originalProps = new DataSourcePoolProperties(MockedDataSource.class.getName(), getProperties()); + Map actualAllProps = originalProps.getAllLocalProperties(); assertThat(actualAllProps.size(), is(7)); assertTrue(actualAllProps.containsKey("driverClassName")); assertTrue(actualAllProps.containsValue(MockedDataSource.class.getName())); @@ -93,53 +93,53 @@ private Map getProperties() { @Test void assertEquals() { - assertThat(new DataSourceProperties(MockedDataSource.class.getName(), createUserProperties("root")), - is(new DataSourceProperties(MockedDataSource.class.getName(), createUserProperties("root")))); + assertThat(new DataSourcePoolProperties(MockedDataSource.class.getName(), createUserProperties("root")), + is(new DataSourcePoolProperties(MockedDataSource.class.getName(), createUserProperties("root")))); } @Test void assertNotEqualsWithNullValue() { - assertNotEquals(null, new DataSourceProperties(MockedDataSource.class.getName(), new HashMap<>())); + assertNotEquals(null, new DataSourcePoolProperties(MockedDataSource.class.getName(), new 
HashMap<>())); } @Test void assertNotEqualsWithDifferentDataSourceClassName() { - assertThat(new DataSourceProperties("FooDataSourceClass", new HashMap<>()), not(new DataSourceProperties("BarDataSourceClass", new HashMap<>()))); + assertThat(new DataSourcePoolProperties("FooDataSourceClass", new HashMap<>()), not(new DataSourcePoolProperties("BarDataSourceClass", new HashMap<>()))); } @Test void assertNotEqualsWithDifferentProperties() { - DataSourceProperties actual = new DataSourceProperties(MockedDataSource.class.getName(), createUserProperties("foo")); - DataSourceProperties expected = new DataSourceProperties(MockedDataSource.class.getName(), createUserProperties("bar")); + DataSourcePoolProperties actual = new DataSourcePoolProperties(MockedDataSource.class.getName(), createUserProperties("foo")); + DataSourcePoolProperties expected = new DataSourcePoolProperties(MockedDataSource.class.getName(), createUserProperties("bar")); assertThat(actual, not(expected)); } @Test void assertSameHashCode() { - assertThat(new DataSourceProperties(MockedDataSource.class.getName(), createUserProperties("root")).hashCode(), - is(new DataSourceProperties(MockedDataSource.class.getName(), createUserProperties("root")).hashCode())); + assertThat(new DataSourcePoolProperties(MockedDataSource.class.getName(), createUserProperties("root")).hashCode(), + is(new DataSourcePoolProperties(MockedDataSource.class.getName(), createUserProperties("root")).hashCode())); } @Test void assertDifferentHashCodeWithDifferentDataSourceClassName() { - assertThat(new DataSourceProperties("FooDataSourceClass", createUserProperties("foo")).hashCode(), - not(new DataSourceProperties("BarDataSourceClass", createUserProperties("foo")).hashCode())); + assertThat(new DataSourcePoolProperties("FooDataSourceClass", createUserProperties("foo")).hashCode(), + not(new DataSourcePoolProperties("BarDataSourceClass", createUserProperties("foo")).hashCode())); } @Test void 
assertDifferentHashCodeWithDifferentProperties() { - assertThat(new DataSourceProperties(MockedDataSource.class.getName(), createUserProperties("foo")).hashCode(), - not(new DataSourceProperties(MockedDataSource.class.getName(), createUserProperties("bar")).hashCode())); + assertThat(new DataSourcePoolProperties(MockedDataSource.class.getName(), createUserProperties("foo")).hashCode(), + not(new DataSourcePoolProperties(MockedDataSource.class.getName(), createUserProperties("bar")).hashCode())); } private Map createUserProperties(final String username) { Map result = new LinkedHashMap<>(2, 1F); result.put("username", username); - result.put("dataSourceProperties", getDataSourceProperties()); + result.put("dataSourceProperties", getDataSourcePoolProperties()); return result; } - private Map getDataSourceProperties() { + private Map getDataSourcePoolProperties() { Map result = new LinkedHashMap<>(3, 1F); result.put("maintainTimeStats", "false"); result.put("rewriteBatchedStatements", "true"); diff --git a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePropertiesValidatorTest.java b/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePoolPropertiesValidatorTest.java similarity index 86% rename from infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePropertiesValidatorTest.java rename to infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePoolPropertiesValidatorTest.java index d128f51bf067b..42f7e0785161b 100644 --- a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePropertiesValidatorTest.java +++ b/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/DataSourcePoolPropertiesValidatorTest.java @@ -26,7 +26,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; -class 
DataSourcePropertiesValidatorTest { +class DataSourcePoolPropertiesValidatorTest { @BeforeAll static void setUp() throws ClassNotFoundException { @@ -35,7 +35,7 @@ static void setUp() throws ClassNotFoundException { @Test void assertValidateSuccess() { - assertTrue(new DataSourcePropertiesValidator().validate( - Collections.singletonMap("name", new DataSourceProperties(HikariDataSource.class.getName(), Collections.singletonMap("jdbcUrl", "jdbc:mock")))).isEmpty()); + assertTrue(new DataSourcePoolPropertiesValidator().validate( + Collections.singletonMap("name", new DataSourcePoolProperties(HikariDataSource.class.getName(), Collections.singletonMap("jdbcUrl", "jdbc:mock")))).isEmpty()); } } diff --git a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/custom/CustomDataSourcePropertiesTest.java b/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/custom/CustomDataSourcePoolPropertiesTest.java similarity index 95% rename from infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/custom/CustomDataSourcePropertiesTest.java rename to infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/custom/CustomDataSourcePoolPropertiesTest.java index 5530eb0430e1d..a7a8eb6bf492e 100644 --- a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/custom/CustomDataSourcePropertiesTest.java +++ b/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/custom/CustomDataSourcePoolPropertiesTest.java @@ -28,11 +28,11 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -class CustomDataSourcePropertiesTest { +class CustomDataSourcePoolPropertiesTest { @Test void assertGetProperties() { - Map actual = new CustomDataSourceProperties( + Map actual = new CustomDataSourcePoolProperties( createProperties(), 
Arrays.asList("username", "password", "closed"), Collections.singletonList("closed"), Collections.singletonMap("username", "user")).getProperties(); assertThat(actual.size(), is(3)); assertThat(actual.get("foo"), is("bar")); diff --git a/infra/datasource/type/c3p0/src/test/java/org/apache/shardingsphere/infra/datasource/c3p0/creator/C3P0DataSourcePoolCreatorTest.java b/infra/datasource/type/c3p0/src/test/java/org/apache/shardingsphere/infra/datasource/c3p0/creator/C3P0DataSourcePoolCreatorTest.java index 5d0c925681e3b..33b63d31a0292 100644 --- a/infra/datasource/type/c3p0/src/test/java/org/apache/shardingsphere/infra/datasource/c3p0/creator/C3P0DataSourcePoolCreatorTest.java +++ b/infra/datasource/type/c3p0/src/test/java/org/apache/shardingsphere/infra/datasource/c3p0/creator/C3P0DataSourcePoolCreatorTest.java @@ -19,7 +19,7 @@ import com.mchange.v2.c3p0.ComboPooledDataSource; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; import org.apache.shardingsphere.test.util.PropertiesBuilder; import org.junit.jupiter.api.Test; @@ -34,7 +34,7 @@ class C3P0DataSourcePoolCreatorTest { @Test void assertCreateDataSource() { - ComboPooledDataSource actual = (ComboPooledDataSource) DataSourcePoolCreator.create(new DataSourceProperties(ComboPooledDataSource.class.getName(), createDataSourceProperties())); + ComboPooledDataSource actual = (ComboPooledDataSource) DataSourcePoolCreator.create(new DataSourcePoolProperties(ComboPooledDataSource.class.getName(), createDataSourcePoolProperties())); assertThat(actual.getJdbcUrl(), is("jdbc:mock://127.0.0.1/foo_ds")); assertThat(actual.getUser(), is("root")); assertThat(actual.getPassword(), is("root")); @@ -42,7 +42,7 @@ void assertCreateDataSource() { new 
PropertiesBuilder.Property("password", "root"), new PropertiesBuilder.Property("user", "root")))); } - private Map createDataSourceProperties() { + private Map createDataSourcePoolProperties() { Map result = new HashMap<>(); result.put("url", "jdbc:mock://127.0.0.1/foo_ds"); result.put("driverClassName", MockedDataSource.class.getName()); diff --git a/infra/datasource/type/dbcp/src/test/java/org/apache/shardingsphere/infra/datasource/dbcp/creator/DBCPDataSourcePoolCreatorTest.java b/infra/datasource/type/dbcp/src/test/java/org/apache/shardingsphere/infra/datasource/dbcp/creator/DBCPDataSourcePoolCreatorTest.java index 336eb16de2321..9140ec8e0009a 100644 --- a/infra/datasource/type/dbcp/src/test/java/org/apache/shardingsphere/infra/datasource/dbcp/creator/DBCPDataSourcePoolCreatorTest.java +++ b/infra/datasource/type/dbcp/src/test/java/org/apache/shardingsphere/infra/datasource/dbcp/creator/DBCPDataSourcePoolCreatorTest.java @@ -20,7 +20,7 @@ import lombok.SneakyThrows; import org.apache.commons.dbcp2.BasicDataSource; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; import org.apache.shardingsphere.test.util.PropertiesBuilder; import org.apache.shardingsphere.test.util.PropertiesBuilder.Property; @@ -38,14 +38,14 @@ class DBCPDataSourcePoolCreatorTest { @Test void assertCreateDataSource() { - BasicDataSource actual = (BasicDataSource) DataSourcePoolCreator.create(new DataSourceProperties(BasicDataSource.class.getName(), createDataSourceProperties())); + BasicDataSource actual = (BasicDataSource) DataSourcePoolCreator.create(new DataSourcePoolProperties(BasicDataSource.class.getName(), createDataSourcePoolProperties())); assertThat(actual.getUrl(), is("jdbc:mock://127.0.0.1/foo_ds")); 
assertThat(actual.getUsername(), is("root")); assertThat(actual.getPassword(), is("root")); assertThat(getConnectionProperties(actual), is(PropertiesBuilder.build(new Property("foo", "foo_value"), new Property("bar", "bar_value")))); } - private Map createDataSourceProperties() { + private Map createDataSourcePoolProperties() { Map result = new HashMap<>(); result.put("url", "jdbc:mock://127.0.0.1/foo_ds"); result.put("driverClassName", MockedDataSource.class.getName()); diff --git a/infra/datasource/type/druid/pom.xml b/infra/datasource/type/druid/pom.xml deleted file mode 100644 index 479e40f11088d..0000000000000 --- a/infra/datasource/type/druid/pom.xml +++ /dev/null @@ -1,54 +0,0 @@ - - - - - 4.0.0 - - org.apache.shardingsphere - shardingsphere-infra-datasource-type - 5.4.1-SNAPSHOT - - shardingsphere-infra-datasource-druid - ${project.artifactId} - - - - org.apache.shardingsphere - shardingsphere-infra-datasource-core - ${project.version} - - - - com.alibaba - druid - - - - org.apache.shardingsphere - shardingsphere-test-fixture-database - ${project.version} - test - - - org.apache.shardingsphere - shardingsphere-test-util - ${project.version} - test - - - diff --git a/infra/datasource/type/druid/src/main/java/org/apache/shardingsphere/infra/datasource/druid/metadata/DruidDataSourcePoolFieldMetaData.java b/infra/datasource/type/druid/src/main/java/org/apache/shardingsphere/infra/datasource/druid/metadata/DruidDataSourcePoolFieldMetaData.java deleted file mode 100644 index 6ef71105baa39..0000000000000 --- a/infra/datasource/type/druid/src/main/java/org/apache/shardingsphere/infra/datasource/druid/metadata/DruidDataSourcePoolFieldMetaData.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.infra.datasource.druid.metadata; - -import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolFieldMetaData; - -/** - * Druid data source pool field meta data. - */ -public final class DruidDataSourcePoolFieldMetaData implements DataSourcePoolFieldMetaData { - - @Override - public String getJdbcUrlFieldName() { - return "url"; - } - - @Override - public String getJdbcUrlPropertiesFieldName() { - return "connectionProperties"; - } -} diff --git a/infra/datasource/type/druid/src/main/java/org/apache/shardingsphere/infra/datasource/druid/metadata/DruidDataSourcePoolMetaData.java b/infra/datasource/type/druid/src/main/java/org/apache/shardingsphere/infra/datasource/druid/metadata/DruidDataSourcePoolMetaData.java deleted file mode 100644 index 9d65164aa9479..0000000000000 --- a/infra/datasource/type/druid/src/main/java/org/apache/shardingsphere/infra/datasource/druid/metadata/DruidDataSourcePoolMetaData.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.infra.datasource.druid.metadata; - -import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData; - -import java.util.Collection; -import java.util.Collections; -import java.util.LinkedList; -import java.util.Map; - -/** - * Druid data source pool meta data. - */ -public final class DruidDataSourcePoolMetaData implements DataSourcePoolMetaData { - - private static final Collection TRANSIENT_FIELD_NAMES = new LinkedList<>(); - - static { - buildTransientFieldNames(); - } - - private static void buildTransientFieldNames() { - TRANSIENT_FIELD_NAMES.add("closed"); - } - - @Override - public Map getDefaultProperties() { - return Collections.emptyMap(); - } - - @Override - public Map getSkippedProperties() { - return Collections.emptyMap(); - } - - @Override - public Map getPropertySynonyms() { - return Collections.emptyMap(); - } - - @Override - public Collection getTransientFieldNames() { - return TRANSIENT_FIELD_NAMES; - } - - @Override - public DruidDataSourcePoolFieldMetaData getFieldMetaData() { - return new DruidDataSourcePoolFieldMetaData(); - } - - @Override - public String getType() { - return "com.alibaba.druid.pool.DruidDataSource"; - } -} diff --git a/infra/datasource/type/druid/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData b/infra/datasource/type/druid/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData deleted file mode 100644 index 
bda50be822fed..0000000000000 --- a/infra/datasource/type/druid/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData +++ /dev/null @@ -1,18 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -org.apache.shardingsphere.infra.datasource.druid.metadata.DruidDataSourcePoolMetaData diff --git a/infra/datasource/type/druid/src/test/java/org/apache/shardingsphere/infra/datasource/druid/creator/DruidDataSourcePoolCreatorTest.java b/infra/datasource/type/druid/src/test/java/org/apache/shardingsphere/infra/datasource/druid/creator/DruidDataSourcePoolCreatorTest.java deleted file mode 100644 index 367d0f33e25a6..0000000000000 --- a/infra/datasource/type/druid/src/test/java/org/apache/shardingsphere/infra/datasource/druid/creator/DruidDataSourcePoolCreatorTest.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.infra.datasource.druid.creator; - -import com.alibaba.druid.pool.DruidDataSource; -import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; -import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; -import org.apache.shardingsphere.test.util.PropertiesBuilder; -import org.junit.jupiter.api.Test; - -import java.util.HashMap; -import java.util.Map; - -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.MatcherAssert.assertThat; - -class DruidDataSourcePoolCreatorTest { - - @Test - void assertCreateDataSource() { - DruidDataSource actual = (DruidDataSource) DataSourcePoolCreator.create(new DataSourceProperties(DruidDataSource.class.getName(), createDataSourceProperties())); - assertThat(actual.getUrl(), is("jdbc:mock://127.0.0.1/foo_ds")); - assertThat(actual.getUsername(), is("root")); - assertThat(actual.getPassword(), is("root")); - assertThat(actual.getConnectProperties(), is(PropertiesBuilder.build(new PropertiesBuilder.Property("foo", "foo_value"), new PropertiesBuilder.Property("bar", "bar_value")))); - } - - private Map createDataSourceProperties() { - Map result = new HashMap<>(); - result.put("url", "jdbc:mock://127.0.0.1/foo_ds"); - result.put("driverClassName", MockedDataSource.class.getName()); - result.put("username", "root"); - result.put("password", "root"); - result.put("connectProperties", PropertiesBuilder.build(new PropertiesBuilder.Property("foo", 
"foo_value"), new PropertiesBuilder.Property("bar", "bar_value"))); - return result; - } -} diff --git a/infra/datasource/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/hikari/metadata/HikariDataSourcePoolPropertiesValidator.java b/infra/datasource/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/hikari/metadata/HikariDataSourcePoolPropertiesValidator.java index 1a161550a1a1a..7f3ba18b393af 100644 --- a/infra/datasource/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/hikari/metadata/HikariDataSourcePoolPropertiesValidator.java +++ b/infra/datasource/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/hikari/metadata/HikariDataSourcePoolPropertiesValidator.java @@ -19,7 +19,7 @@ import com.google.common.base.Preconditions; import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolPropertiesValidator; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -36,8 +36,8 @@ public final class HikariDataSourcePoolPropertiesValidator implements DataSource private static final long MIN_KEEP_ALIVE_TIME_MILLISECONDS = TimeUnit.SECONDS.toMillis(30L); @Override - public void validate(final DataSourceProperties dataSourceProps) { - Map allLocalProperties = dataSourceProps.getAllLocalProperties(); + public void validate(final DataSourcePoolProperties props) { + Map allLocalProperties = props.getAllLocalProperties(); validateConnectionTimeout(allLocalProperties); validateIdleTimeout(allLocalProperties); validateMaxLifetime(allLocalProperties); @@ -46,53 +46,53 @@ public void validate(final DataSourceProperties dataSourceProps) { validateKeepAliveTime(allLocalProperties); } - private void validateConnectionTimeout(final Map allLocalProperties) { - if (isExisted(allLocalProperties, "connectionTimeout")) { - 
long connectionTimeout = Long.parseLong(allLocalProperties.get("connectionTimeout").toString()); + private void validateConnectionTimeout(final Map allLocalProps) { + if (isExisted(allLocalProps, "connectionTimeout")) { + long connectionTimeout = Long.parseLong(allLocalProps.get("connectionTimeout").toString()); Preconditions.checkState(connectionTimeout >= MIN_CONNECTION_TIMEOUT_MILLISECONDS, "connectionTimeout can not less than %s ms.", MIN_CONNECTION_TIMEOUT_MILLISECONDS); } } - private void validateIdleTimeout(final Map allLocalProperties) { - if (isExisted(allLocalProperties, "idleTimeout")) { - long idleTimeout = Long.parseLong(allLocalProperties.get("idleTimeout").toString()); + private void validateIdleTimeout(final Map allLocalProps) { + if (isExisted(allLocalProps, "idleTimeout")) { + long idleTimeout = Long.parseLong(allLocalProps.get("idleTimeout").toString()); Preconditions.checkState(idleTimeout >= 0, "idleTimeout can not be negative."); } } - private void validateMaxLifetime(final Map allLocalProperties) { - if (isExisted(allLocalProperties, "maxLifetime")) { - long maxLifetime = Long.parseLong(allLocalProperties.get("maxLifetime").toString()); + private void validateMaxLifetime(final Map allLocalProps) { + if (isExisted(allLocalProps, "maxLifetime")) { + long maxLifetime = Long.parseLong(allLocalProps.get("maxLifetime").toString()); Preconditions.checkState(maxLifetime >= MIN_LIFETIME_MILLISECONDS, "maxLifetime can not less than %s ms.", MIN_LIFETIME_MILLISECONDS); } } - private void validateMaximumPoolSize(final Map allLocalProperties) { - if (isExisted(allLocalProperties, "maximumPoolSize")) { - int maximumPoolSize = Integer.parseInt(allLocalProperties.get("maximumPoolSize").toString()); + private void validateMaximumPoolSize(final Map allLocalProps) { + if (isExisted(allLocalProps, "maximumPoolSize")) { + int maximumPoolSize = Integer.parseInt(allLocalProps.get("maximumPoolSize").toString()); Preconditions.checkState(maximumPoolSize >= 1, 
"maxPoolSize can not less than 1."); } } - private void validateMinimumIdle(final Map allLocalProperties) { - if (isExisted(allLocalProperties, "minimumIdle")) { - int minimumIdle = Integer.parseInt(allLocalProperties.get("minimumIdle").toString()); + private void validateMinimumIdle(final Map allLocalProps) { + if (isExisted(allLocalProps, "minimumIdle")) { + int minimumIdle = Integer.parseInt(allLocalProps.get("minimumIdle").toString()); Preconditions.checkState(minimumIdle >= 0, "minimumIdle can not be negative."); } } - private void validateKeepAliveTime(final Map allLocalProperties) { - if (!isExisted(allLocalProperties, "keepaliveTime")) { + private void validateKeepAliveTime(final Map allLocalProps) { + if (!isExisted(allLocalProps, "keepaliveTime")) { return; } - int keepAliveTime = Integer.parseInt(allLocalProperties.get("keepaliveTime").toString()); + int keepAliveTime = Integer.parseInt(allLocalProps.get("keepaliveTime").toString()); if (0 == keepAliveTime) { return; } Preconditions.checkState(keepAliveTime >= MIN_KEEP_ALIVE_TIME_MILLISECONDS, "keepaliveTime can not be less than %s ms.", MIN_KEEP_ALIVE_TIME_MILLISECONDS); } - private boolean isExisted(final Map allLocalProperties, final String key) { - return allLocalProperties.containsKey(key) && null != allLocalProperties.get(key); + private boolean isExisted(final Map allLocalProps, final String key) { + return allLocalProps.containsKey(key) && null != allLocalProps.get(key); } } diff --git a/infra/datasource/type/hikari/src/test/java/org/apache/shardingsphere/infra/datasource/hikari/creator/HikariDataSourcePoolCreatorTest.java b/infra/datasource/type/hikari/src/test/java/org/apache/shardingsphere/infra/datasource/hikari/creator/HikariDataSourcePoolCreatorTest.java index c1d41002dd60f..7b7550c5d21c5 100644 --- a/infra/datasource/type/hikari/src/test/java/org/apache/shardingsphere/infra/datasource/hikari/creator/HikariDataSourcePoolCreatorTest.java +++ 
b/infra/datasource/type/hikari/src/test/java/org/apache/shardingsphere/infra/datasource/hikari/creator/HikariDataSourcePoolCreatorTest.java @@ -19,7 +19,7 @@ import com.zaxxer.hikari.HikariDataSource; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; import org.apache.shardingsphere.test.util.PropertiesBuilder; import org.apache.shardingsphere.test.util.PropertiesBuilder.Property; @@ -35,14 +35,14 @@ class HikariDataSourcePoolCreatorTest { @Test void assertCreateDataSource() { - HikariDataSource actual = (HikariDataSource) DataSourcePoolCreator.create(new DataSourceProperties(HikariDataSource.class.getName(), createDataSourceProperties())); + HikariDataSource actual = (HikariDataSource) DataSourcePoolCreator.create(new DataSourcePoolProperties(HikariDataSource.class.getName(), createDataSourcePoolProperties())); assertThat(actual.getJdbcUrl(), is("jdbc:mock://127.0.0.1/foo_ds")); assertThat(actual.getUsername(), is("root")); assertThat(actual.getPassword(), is("root")); assertThat(actual.getDataSourceProperties(), is(PropertiesBuilder.build(new Property("foo", "foo_value"), new Property("bar", "bar_value")))); } - private Map createDataSourceProperties() { + private Map createDataSourcePoolProperties() { Map result = new HashMap<>(); result.put("jdbcUrl", "jdbc:mock://127.0.0.1/foo_ds"); result.put("driverClassName", MockedDataSource.class.getName()); diff --git a/infra/datasource/type/pom.xml b/infra/datasource/type/pom.xml index 7872e3eb04e67..842e2a1849aa5 100644 --- a/infra/datasource/type/pom.xml +++ b/infra/datasource/type/pom.xml @@ -30,7 +30,6 @@ c3p0 dbcp - druid hikari diff --git 
a/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/validate/DataSourcePropertiesValidateHandler.java b/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/validate/DataSourcePoolPropertiesValidateHandler.java similarity index 77% rename from infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/validate/DataSourcePropertiesValidateHandler.java rename to infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/validate/DataSourcePoolPropertiesValidateHandler.java index 6d2348afa5f37..05fdcb2c3348b 100644 --- a/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/validate/DataSourcePropertiesValidateHandler.java +++ b/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/validate/DataSourcePoolPropertiesValidateHandler.java @@ -18,25 +18,25 @@ package org.apache.shardingsphere.distsql.handler.validate; import org.apache.shardingsphere.distsql.handler.exception.storageunit.InvalidStorageUnitsException; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesValidator; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesValidator; import java.util.Collection; import java.util.Map; /** - * Data source properties validate handler. + * Data source pool properties validate handler. */ -public final class DataSourcePropertiesValidateHandler { +public final class DataSourcePoolPropertiesValidateHandler { /** * Validate data source properties map. 
* - * @param dataSourcePropertiesMap data source properties map + * @param propsMap data source pool properties map * @throws InvalidStorageUnitsException invalid storage units exception */ - public void validate(final Map dataSourcePropertiesMap) { - Collection errorMessages = new DataSourcePropertiesValidator().validate(dataSourcePropertiesMap); + public void validate(final Map propsMap) { + Collection errorMessages = new DataSourcePoolPropertiesValidator().validate(propsMap); if (!errorMessages.isEmpty()) { throw new InvalidStorageUnitsException(errorMessages); } diff --git a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/connection/DriverDatabaseConnectionManager.java b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/connection/DriverDatabaseConnectionManager.java index 1d22709c2a967..229f9542ee4c5 100644 --- a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/connection/DriverDatabaseConnectionManager.java +++ b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/connection/DriverDatabaseConnectionManager.java @@ -26,7 +26,7 @@ import org.apache.shardingsphere.driver.jdbc.adapter.invocation.MethodInvocationRecorder; import org.apache.shardingsphere.driver.jdbc.core.ShardingSphereSavepoint; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.exception.OverallConnectionNotEnoughException; import org.apache.shardingsphere.infra.executor.sql.execute.engine.ConnectionMode; import org.apache.shardingsphere.infra.executor.sql.prepare.driver.DatabaseConnectionManager; @@ -95,31 +95,31 @@ private Map getTrafficDataSourceMap(final String databaseNam } MetaDataBasedPersistService persistService = contextManager.getMetaDataContexts().getPersistService(); String 
actualDatabaseName = contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(); - Map dataSourcePropsMap = persistService.getDataSourceUnitService().load(actualDatabaseName); - Preconditions.checkState(!dataSourcePropsMap.isEmpty(), "Can not get data source properties from meta data."); - DataSourceProperties dataSourcePropsSample = dataSourcePropsMap.values().iterator().next(); + Map propsMap = persistService.getDataSourceUnitService().load(actualDatabaseName); + Preconditions.checkState(!propsMap.isEmpty(), "Can not get data source properties from meta data."); + DataSourcePoolProperties propsSample = propsMap.values().iterator().next(); Collection users = persistService.getGlobalRuleService().loadUsers(); Collection instances = contextManager.getInstanceContext().getAllClusterInstances(InstanceType.PROXY, rule.getLabels()); - return DataSourcePoolCreator.create(createDataSourcePropertiesMap(instances, users, dataSourcePropsSample, actualDatabaseName)); + return DataSourcePoolCreator.create(createDataSourcePoolPropertiesMap(instances, users, propsSample, actualDatabaseName)); } - private Map createDataSourcePropertiesMap(final Collection instances, final Collection users, - final DataSourceProperties dataSourcePropsSample, final String schema) { - Map result = new LinkedHashMap<>(); + private Map createDataSourcePoolPropertiesMap(final Collection instances, final Collection users, + final DataSourcePoolProperties propsSample, final String schema) { + Map result = new LinkedHashMap<>(); for (InstanceMetaData each : instances) { - result.put(each.getId(), createDataSourceProperties((ProxyInstanceMetaData) each, users, dataSourcePropsSample, schema)); + result.put(each.getId(), createDataSourcePoolProperties((ProxyInstanceMetaData) each, users, propsSample, schema)); } return result; } - private DataSourceProperties createDataSourceProperties(final ProxyInstanceMetaData instanceMetaData, final Collection users, - final 
DataSourceProperties dataSourcePropsSample, final String schema) { - Map props = dataSourcePropsSample.getAllLocalProperties(); + private DataSourcePoolProperties createDataSourcePoolProperties(final ProxyInstanceMetaData instanceMetaData, final Collection users, + final DataSourcePoolProperties propsSample, final String schema) { + Map props = propsSample.getAllLocalProperties(); props.put("jdbcUrl", createJdbcUrl(instanceMetaData, schema, props)); ShardingSphereUser user = users.iterator().next(); props.put("username", user.getGrantee().getUsername()); props.put("password", user.getPassword()); - return new DataSourceProperties("com.zaxxer.hikari.HikariDataSource", props); + return new DataSourcePoolProperties("com.zaxxer.hikari.HikariDataSource", props); } private String createJdbcUrl(final ProxyInstanceMetaData instanceMetaData, final String schema, final Map props) { diff --git a/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/connection/DriverDatabaseConnectionManagerTest.java b/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/connection/DriverDatabaseConnectionManagerTest.java index 8fd8234c802d7..dac6d686e6382 100644 --- a/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/connection/DriverDatabaseConnectionManagerTest.java +++ b/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/connection/DriverDatabaseConnectionManagerTest.java @@ -20,7 +20,7 @@ import com.zaxxer.hikari.HikariDataSource; import org.apache.shardingsphere.infra.database.core.DefaultDatabase; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.executor.sql.execute.engine.ConnectionMode; import org.apache.shardingsphere.infra.instance.metadata.InstanceType; import 
org.apache.shardingsphere.infra.instance.metadata.proxy.ProxyInstanceMetaData; @@ -99,7 +99,7 @@ private Map mockDataSourceMap() throws SQLException { private MetaDataPersistService mockMetaDataPersistService() { MetaDataPersistService result = mock(MetaDataPersistService.class, RETURNS_DEEP_STUBS); when(result.getDataSourceUnitService().load(DefaultDatabase.LOGIC_NAME)) - .thenReturn(Collections.singletonMap(DefaultDatabase.LOGIC_NAME, new DataSourceProperties(HikariDataSource.class.getName(), createProperties()))); + .thenReturn(Collections.singletonMap(DefaultDatabase.LOGIC_NAME, new DataSourcePoolProperties(HikariDataSource.class.getName(), createProperties()))); when(result.getGlobalRuleService().loadUsers()).thenReturn(Collections.singletonList(new ShardingSphereUser("root", "root", "localhost"))); return result; } diff --git a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/datasource/config/impl/ShardingSpherePipelineDataSourceConfiguration.java b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/datasource/config/impl/ShardingSpherePipelineDataSourceConfiguration.java index 796900fd0f099..71d94590fc1ba 100644 --- a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/datasource/config/impl/ShardingSpherePipelineDataSourceConfiguration.java +++ b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/datasource/config/impl/ShardingSpherePipelineDataSourceConfiguration.java @@ -66,7 +66,7 @@ public ShardingSpherePipelineDataSourceConfiguration(final String param) { Map props = rootConfig.getDataSources().values().iterator().next(); databaseType = DatabaseTypeFactory.get(getJdbcUrl(props)); appendJdbcQueryProperties(databaseType); - adjustDataSourceProperties(rootConfig.getDataSources()); + adjustDataSourcePoolProperties(rootConfig.getDataSources()); } public ShardingSpherePipelineDataSourceConfiguration(final YamlRootConfiguration 
rootConfig) { @@ -102,7 +102,7 @@ private void appendJdbcQueryProperties(final DatabaseType databaseType) { }); } - private void adjustDataSourceProperties(final Map> dataSources) { + private void adjustDataSourcePoolProperties(final Map> dataSources) { for (Map queryProps : dataSources.values()) { for (String each : Arrays.asList("minPoolSize", "minimumIdle")) { queryProps.put(each, "1"); diff --git a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/datasource/config/impl/StandardPipelineDataSourceConfiguration.java b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/datasource/config/impl/StandardPipelineDataSourceConfiguration.java index 0d554608e8ded..207afbe5b78c8 100644 --- a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/datasource/config/impl/StandardPipelineDataSourceConfiguration.java +++ b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/datasource/config/impl/StandardPipelineDataSourceConfiguration.java @@ -26,7 +26,7 @@ import org.apache.shardingsphere.infra.database.core.connector.url.StandardJdbcUrlParser; import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeFactory; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.database.core.spi.DatabaseTypedSPILoader; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.infra.yaml.config.swapper.resource.YamlDataSourceConfigurationSwapper; @@ -51,7 +51,7 @@ public final class StandardPipelineDataSourceConfiguration implements PipelineDa @Getter private final String parameter; - private final DataSourceProperties dataSourceProperties; + private final DataSourcePoolProperties 
dataSourcePoolProps; @Getter private final YamlJdbcConfiguration jdbcConfig; @@ -82,7 +82,7 @@ private StandardPipelineDataSourceConfiguration(final String param, final Map backup, final Ma private void assertGetConfig(final StandardPipelineDataSourceConfiguration actual) { assertThat(actual.getDatabaseType().getType(), is("MySQL")); assertThat(actual.getType(), is(StandardPipelineDataSourceConfiguration.TYPE)); - DataSourceProperties dataSourceProps = (DataSourceProperties) actual.getDataSourceConfiguration(); - assertThat(dataSourceProps.getDataSourceClassName(), is("com.zaxxer.hikari.HikariDataSource")); + DataSourcePoolProperties props = (DataSourcePoolProperties) actual.getDataSourceConfiguration(); + assertThat(props.getPoolClassName(), is("com.zaxxer.hikari.HikariDataSource")); assertGetJdbcConfig(actual.getJdbcConfig()); - assertDataSourceProperties(dataSourceProps); + assertDataSourcePoolProperties(props); } private void assertGetJdbcConfig(final YamlJdbcConfiguration actual) { @@ -88,8 +88,8 @@ private void assertGetJdbcConfig(final YamlJdbcConfiguration actual) { assertThat(actual.getPassword(), is(PASSWORD)); } - private void assertDataSourceProperties(final DataSourceProperties dataSourceProps) { - Map actual = new YamlDataSourceConfigurationSwapper().swapToMap(dataSourceProps); + private void assertDataSourcePoolProperties(final DataSourcePoolProperties props) { + Map actual = new YamlDataSourceConfigurationSwapper().swapToMap(props); assertThat(actual.get("minPoolSize"), is("1")); } } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/datasource/creator/StandardPipelineDataSourceCreator.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/datasource/creator/StandardPipelineDataSourceCreator.java index fa02fb655d645..0791c2a4812c7 100644 --- 
a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/datasource/creator/StandardPipelineDataSourceCreator.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/datasource/creator/StandardPipelineDataSourceCreator.java @@ -20,7 +20,7 @@ import org.apache.shardingsphere.data.pipeline.api.datasource.config.impl.StandardPipelineDataSourceConfiguration; import org.apache.shardingsphere.data.pipeline.spi.datasource.creator.PipelineDataSourceCreator; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import javax.sql.DataSource; @@ -31,7 +31,7 @@ public final class StandardPipelineDataSourceCreator implements PipelineDataSour @Override public DataSource createPipelineDataSource(final Object dataSourceConfig) { - return DataSourcePoolCreator.create((DataSourceProperties) dataSourceConfig); + return DataSourcePoolCreator.create((DataSourcePoolProperties) dataSourceConfig); } @Override diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/metadata/PipelineDataSourcePersistService.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/metadata/PipelineDataSourcePersistService.java index c58af9fb1d3e8..c9a427662dc08 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/metadata/PipelineDataSourcePersistService.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/metadata/PipelineDataSourcePersistService.java @@ -21,7 +21,7 @@ import org.apache.shardingsphere.data.pipeline.common.context.PipelineContextKey; import org.apache.shardingsphere.data.pipeline.common.job.type.JobType; import 
org.apache.shardingsphere.data.pipeline.core.job.service.PipelineAPIFactory; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.infra.yaml.config.swapper.resource.YamlDataSourceConfigurationSwapper; @@ -33,27 +33,27 @@ /** * Pipeline data source persist service. */ -public final class PipelineDataSourcePersistService implements PipelineMetaDataPersistService> { +public final class PipelineDataSourcePersistService implements PipelineMetaDataPersistService> { private final YamlDataSourceConfigurationSwapper swapper = new YamlDataSourceConfigurationSwapper(); @Override @SuppressWarnings("unchecked") - public Map load(final PipelineContextKey contextKey, final JobType jobType) { + public Map load(final PipelineContextKey contextKey, final JobType jobType) { String dataSourcesProps = PipelineAPIFactory.getGovernanceRepositoryAPI(contextKey).getMetaDataDataSources(jobType); if (Strings.isNullOrEmpty(dataSourcesProps)) { return Collections.emptyMap(); } Map> yamlDataSources = YamlEngine.unmarshal(dataSourcesProps, Map.class); - Map result = new LinkedHashMap<>(yamlDataSources.size(), 1F); - yamlDataSources.forEach((key, value) -> result.put(key, swapper.swapToDataSourceProperties(value))); + Map result = new LinkedHashMap<>(yamlDataSources.size(), 1F); + yamlDataSources.forEach((key, value) -> result.put(key, swapper.swapToDataSourcePoolProperties(value))); return result; } @Override - public void persist(final PipelineContextKey contextKey, final JobType jobType, final Map dataSourcePropsMap) { - Map> dataSourceMap = new LinkedHashMap<>(dataSourcePropsMap.size(), 1F); - for (Entry entry : dataSourcePropsMap.entrySet()) { + public void persist(final PipelineContextKey contextKey, final JobType jobType, final Map propsMap) { + Map> dataSourceMap = new 
LinkedHashMap<>(propsMap.size(), 1F); + for (Entry entry : propsMap.entrySet()) { dataSourceMap.put(entry.getKey(), swapper.swapToMap(entry.getValue())); } PipelineAPIFactory.getGovernanceRepositoryAPI(contextKey).persistMetaDataDataSources(jobType, YamlEngine.marshal(dataSourceMap)); diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/preparer/PipelineJobPreparerUtils.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/preparer/PipelineJobPreparerUtils.java index b1a56dfc89ed8..895922c1bcc87 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/preparer/PipelineJobPreparerUtils.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/preparer/PipelineJobPreparerUtils.java @@ -38,7 +38,7 @@ import org.apache.shardingsphere.infra.database.core.spi.DatabaseTypedSPILoader; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.parser.SQLParserEngine; @@ -183,7 +183,7 @@ public static void destroyPosition(final String jobId, final PipelineDataSourceC log.info("Cleanup database type:{}, data source type:{}", databaseType.getType(), pipelineDataSourceConfig.getType()); if (pipelineDataSourceConfig instanceof ShardingSpherePipelineDataSourceConfiguration) { ShardingSpherePipelineDataSourceConfiguration dataSourceConfig = (ShardingSpherePipelineDataSourceConfiguration) pipelineDataSourceConfig; - for (DataSourceProperties each : new 
YamlDataSourceConfigurationSwapper().getDataSourcePropertiesMap(dataSourceConfig.getRootConfig()).values()) { + for (DataSourcePoolProperties each : new YamlDataSourceConfigurationSwapper().getDataSourcePoolPropertiesMap(dataSourceConfig.getRootConfig()).values()) { try (PipelineDataSourceWrapper dataSource = new PipelineDataSourceWrapper(DataSourcePoolCreator.create(each), databaseType)) { positionInitializer.destroy(dataSource, jobId); } @@ -193,7 +193,7 @@ public static void destroyPosition(final String jobId, final PipelineDataSourceC StandardPipelineDataSourceConfiguration dataSourceConfig = (StandardPipelineDataSourceConfiguration) pipelineDataSourceConfig; try ( PipelineDataSourceWrapper dataSource = new PipelineDataSourceWrapper( - DataSourcePoolCreator.create((DataSourceProperties) dataSourceConfig.getDataSourceConfiguration()), databaseType)) { + DataSourcePoolCreator.create((DataSourcePoolProperties) dataSourceConfig.getDataSourceConfiguration()), databaseType)) { positionInitializer.destroy(dataSource, jobId); } } diff --git a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/datasource/config/yaml/YamlJdbcConfigurationTest.java b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/datasource/config/yaml/YamlJdbcConfigurationTest.java index 61e7871021b53..a90da6b619166 100644 --- a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/datasource/config/yaml/YamlJdbcConfigurationTest.java +++ b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/datasource/config/yaml/YamlJdbcConfigurationTest.java @@ -37,10 +37,10 @@ class YamlJdbcConfigurationTest { @Test void assertConstructionWithUrl() { - assertYamlJdbcConfiguration(YamlEngine.unmarshal(YamlEngine.marshal(getDataSourcePropsWithUrl()), YamlJdbcConfiguration.class)); + 
assertYamlJdbcConfiguration(YamlEngine.unmarshal(YamlEngine.marshal(getDataSourcePoolPropertiesWithUrl()), YamlJdbcConfiguration.class)); } - private Map getDataSourcePropsWithUrl() { + private Map getDataSourcePoolPropertiesWithUrl() { Map result = new HashMap<>(3, 1F); result.put("url", JDBC_URL); result.put("username", USERNAME); diff --git a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/update/RegisterMigrationSourceStorageUnitUpdater.java b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/update/RegisterMigrationSourceStorageUnitUpdater.java index 10ef8db4fa0be..337053cdcaa6c 100644 --- a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/update/RegisterMigrationSourceStorageUnitUpdater.java +++ b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/update/RegisterMigrationSourceStorageUnitUpdater.java @@ -20,14 +20,14 @@ import org.apache.shardingsphere.data.pipeline.common.context.PipelineContextKey; import org.apache.shardingsphere.data.pipeline.scenario.migration.api.impl.MigrationJobAPI; import org.apache.shardingsphere.distsql.handler.ral.update.RALUpdater; -import org.apache.shardingsphere.distsql.handler.validate.DataSourcePropertiesValidateHandler; +import org.apache.shardingsphere.distsql.handler.validate.DataSourcePoolPropertiesValidateHandler; import org.apache.shardingsphere.distsql.parser.segment.DataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.HostnameAndPortBasedDataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.URLBasedDataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.converter.DataSourceSegmentsConverter; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeFactory; 
-import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.exception.core.external.sql.type.generic.UnsupportedSQLOperationException; import org.apache.shardingsphere.migration.distsql.statement.RegisterMigrationSourceStorageUnitStatement; @@ -43,7 +43,7 @@ public final class RegisterMigrationSourceStorageUnitUpdater implements RALUpdat private final MigrationJobAPI jobAPI = new MigrationJobAPI(); - private final DataSourcePropertiesValidateHandler validateHandler = new DataSourcePropertiesValidateHandler(); + private final DataSourcePoolPropertiesValidateHandler validateHandler = new DataSourcePoolPropertiesValidateHandler(); @Override public void executeUpdate(final String databaseName, final RegisterMigrationSourceStorageUnitStatement sqlStatement) { @@ -52,9 +52,9 @@ public void executeUpdate(final String databaseName, final RegisterMigrationSour () -> new UnsupportedSQLOperationException("Not currently support add hostname and port, please use url")); URLBasedDataSourceSegment urlBasedDataSourceSegment = (URLBasedDataSourceSegment) dataSources.get(0); DatabaseType databaseType = DatabaseTypeFactory.get(urlBasedDataSourceSegment.getUrl()); - Map sourcePropertiesMap = DataSourceSegmentsConverter.convert(databaseType, dataSources); - validateHandler.validate(sourcePropertiesMap); - jobAPI.addMigrationSourceResources(PipelineContextKey.buildForProxy(), sourcePropertiesMap); + Map propsMap = DataSourceSegmentsConverter.convert(databaseType, dataSources); + validateHandler.validate(propsMap); + jobAPI.addMigrationSourceResources(PipelineContextKey.buildForProxy(), propsMap); } @Override diff --git a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/api/impl/CDCJobAPI.java 
b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/api/impl/CDCJobAPI.java index 14ad2aabaf6d9..dfec3be2944d6 100644 --- a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/api/impl/CDCJobAPI.java +++ b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/api/impl/CDCJobAPI.java @@ -76,7 +76,7 @@ import org.apache.shardingsphere.data.pipeline.spi.ratelimit.JobRateLimitAlgorithm; import org.apache.shardingsphere.elasticjob.infra.pojo.JobConfigurationPOJO; import org.apache.shardingsphere.elasticjob.lite.api.bootstrap.impl.OneOffJobBootstrap; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; @@ -160,13 +160,13 @@ private YamlCDCJobConfiguration getYamlCDCJobConfiguration(final StreamDataParam } private ShardingSpherePipelineDataSourceConfiguration getDataSourceConfiguration(final ShardingSphereDatabase database) { - Map> dataSourceProps = new HashMap<>(); - for (Entry entry : database.getResourceMetaData().getStorageUnitMetaData().getDataSourcePropsMap().entrySet()) { - dataSourceProps.put(entry.getKey(), dataSourceConfigSwapper.swapToMap(entry.getValue())); + Map> dataSourcePoolProps = new HashMap<>(); + for (Entry entry : database.getResourceMetaData().getStorageUnitMetaData().getDataSourcePoolPropertiesMap().entrySet()) { + dataSourcePoolProps.put(entry.getKey(), dataSourceConfigSwapper.swapToMap(entry.getValue())); } YamlRootConfiguration targetRootConfig = new YamlRootConfiguration(); targetRootConfig.setDatabaseName(database.getName()); - 
targetRootConfig.setDataSources(dataSourceProps); + targetRootConfig.setDataSources(dataSourcePoolProps); Collection yamlRuleConfigurations = ruleConfigSwapperEngine.swapToYamlRuleConfigurations(database.getRuleMetaData().getConfigurations()); targetRootConfig.setRules(yamlRuleConfigurations); return new ShardingSpherePipelineDataSourceConfiguration(targetRootConfig); diff --git a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/impl/MigrationJobAPI.java b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/impl/MigrationJobAPI.java index e8daa9efb0dcc..94fef5be0390b 100644 --- a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/impl/MigrationJobAPI.java +++ b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/impl/MigrationJobAPI.java @@ -83,7 +83,7 @@ import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeFactory; import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; import org.apache.shardingsphere.infra.datanode.DataNode; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; @@ -137,7 +137,7 @@ public String createJobAndStart(final PipelineContextKey contextKey, final Migra private YamlMigrationJobConfiguration buildYamlJobConfiguration(final PipelineContextKey contextKey, final MigrateTableStatement param) { YamlMigrationJobConfiguration result = new YamlMigrationJobConfiguration(); 
result.setTargetDatabaseName(param.getTargetDatabaseName()); - Map metaDataDataSource = dataSourcePersistService.load(contextKey, new MigrationJobType()); + Map metaDataDataSource = dataSourcePersistService.load(contextKey, new MigrationJobType()); Map> sourceDataNodes = new LinkedHashMap<>(); Map configSources = new LinkedHashMap<>(); List sourceTargetEntries = new ArrayList<>(new HashSet<>(param.getSourceTargetEntries())).stream().sorted(Comparator.comparing(SourceTargetEntry::getTargetTableName) @@ -153,8 +153,8 @@ private YamlMigrationJobConfiguration buildYamlJobConfiguration(final PipelineCo } ShardingSpherePreconditions.checkState(metaDataDataSource.containsKey(dataSourceName), () -> new PipelineInvalidParameterException(dataSourceName + " doesn't exist. Run `SHOW MIGRATION SOURCE STORAGE UNITS;` to verify it.")); - Map sourceDataSourceProps = dataSourceConfigSwapper.swapToMap(metaDataDataSource.get(dataSourceName)); - StandardPipelineDataSourceConfiguration sourceDataSourceConfig = new StandardPipelineDataSourceConfiguration(sourceDataSourceProps); + Map sourceDataSourcePoolProps = dataSourceConfigSwapper.swapToMap(metaDataDataSource.get(dataSourceName)); + StandardPipelineDataSourceConfiguration sourceDataSourceConfig = new StandardPipelineDataSourceConfiguration(sourceDataSourcePoolProps); configSources.put(dataSourceName, buildYamlPipelineDataSourceConfiguration(sourceDataSourceConfig.getType(), sourceDataSourceConfig.getParameter())); DialectDatabaseMetaData dialectDatabaseMetaData = new DatabaseTypeRegistry(sourceDataSourceConfig.getDatabaseType()).getDialectDatabaseMetaData(); if (null == each.getSource().getSchemaName() && dialectDatabaseMetaData.isSchemaAvailable()) { @@ -190,13 +190,13 @@ private YamlPipelineDataSourceConfiguration buildYamlPipelineDataSourceConfigura } private PipelineDataSourceConfiguration buildTargetPipelineDataSourceConfiguration(final ShardingSphereDatabase targetDatabase) { - Map> targetDataSourceProps = new HashMap<>(); + 
Map> targetDataSourcePoolProps = new HashMap<>(); YamlDataSourceConfigurationSwapper dataSourceConfigSwapper = new YamlDataSourceConfigurationSwapper(); - for (Entry entry : targetDatabase.getResourceMetaData().getStorageUnitMetaData().getDataSourcePropsMap().entrySet()) { - Map dataSourceProps = dataSourceConfigSwapper.swapToMap(entry.getValue()); - targetDataSourceProps.put(entry.getKey(), dataSourceProps); + for (Entry entry : targetDatabase.getResourceMetaData().getStorageUnitMetaData().getDataSourcePoolPropertiesMap().entrySet()) { + Map dataSourcePoolProps = dataSourceConfigSwapper.swapToMap(entry.getValue()); + targetDataSourcePoolProps.put(entry.getKey(), dataSourcePoolProps); } - YamlRootConfiguration targetRootConfig = buildYamlRootConfiguration(targetDatabase.getName(), targetDataSourceProps, targetDatabase.getRuleMetaData().getConfigurations()); + YamlRootConfiguration targetRootConfig = buildYamlRootConfiguration(targetDatabase.getName(), targetDataSourcePoolProps, targetDatabase.getRuleMetaData().getConfigurations()); return new ShardingSpherePipelineDataSourceConfiguration(targetRootConfig); } @@ -430,19 +430,19 @@ public void commit(final String jobId) { * Add migration source resources. 
* * @param contextKey context key - * @param dataSourcePropsMap data source properties map + * @param propsMap data source pool properties map */ - public void addMigrationSourceResources(final PipelineContextKey contextKey, final Map dataSourcePropsMap) { - Map existDataSources = dataSourcePersistService.load(contextKey, getJobType()); - Collection duplicateDataSourceNames = new HashSet<>(dataSourcePropsMap.size(), 1F); - for (Entry entry : dataSourcePropsMap.entrySet()) { + public void addMigrationSourceResources(final PipelineContextKey contextKey, final Map propsMap) { + Map existDataSources = dataSourcePersistService.load(contextKey, getJobType()); + Collection duplicateDataSourceNames = new HashSet<>(propsMap.size(), 1F); + for (Entry entry : propsMap.entrySet()) { if (existDataSources.containsKey(entry.getKey())) { duplicateDataSourceNames.add(entry.getKey()); } } ShardingSpherePreconditions.checkState(duplicateDataSourceNames.isEmpty(), () -> new RegisterMigrationSourceStorageUnitException(duplicateDataSourceNames)); - Map result = new LinkedHashMap<>(existDataSources); - result.putAll(dataSourcePropsMap); + Map result = new LinkedHashMap<>(existDataSources); + result.putAll(propsMap); dataSourcePersistService.persist(contextKey, getJobType(), result); } @@ -453,7 +453,7 @@ public void addMigrationSourceResources(final PipelineContextKey contextKey, fin * @param resourceNames resource names */ public void dropMigrationSourceResources(final PipelineContextKey contextKey, final Collection resourceNames) { - Map metaDataDataSource = dataSourcePersistService.load(contextKey, getJobType()); + Map metaDataDataSource = dataSourcePersistService.load(contextKey, getJobType()); List noExistResources = resourceNames.stream().filter(each -> !metaDataDataSource.containsKey(each)).collect(Collectors.toList()); ShardingSpherePreconditions.checkState(noExistResources.isEmpty(), () -> new UnregisterMigrationSourceStorageUnitException(noExistResources)); for (String each : 
resourceNames) { @@ -469,11 +469,11 @@ public void dropMigrationSourceResources(final PipelineContextKey contextKey, fi * @return migration source resources */ public Collection> listMigrationSourceResources(final PipelineContextKey contextKey) { - Map dataSourcePropertiesMap = dataSourcePersistService.load(contextKey, getJobType()); - Collection> result = new ArrayList<>(dataSourcePropertiesMap.size()); - for (Entry entry : dataSourcePropertiesMap.entrySet()) { + Map propsMap = dataSourcePersistService.load(contextKey, getJobType()); + Collection> result = new ArrayList<>(propsMap.size()); + for (Entry entry : propsMap.entrySet()) { String dataSourceName = entry.getKey(); - DataSourceProperties value = entry.getValue(); + DataSourcePoolProperties value = entry.getValue(); Collection props = new LinkedList<>(); props.add(dataSourceName); String url = String.valueOf(value.getConnectionPropertySynonyms().getStandardProperties().get("url")); @@ -490,7 +490,7 @@ public Collection> listMigrationSourceResources(final Pipelin props.add(getStandardProperty(standardProps, "maxPoolSize")); props.add(getStandardProperty(standardProps, "minPoolSize")); props.add(getStandardProperty(standardProps, "readOnly")); - Map otherProps = value.getCustomDataSourceProperties().getProperties(); + Map otherProps = value.getCustomDataSourcePoolProperties().getProperties(); props.add(otherProps.isEmpty() ? 
"" : new Gson().toJson(otherProps)); result.add(props); } diff --git a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/MetaDataBasedPersistService.java b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/MetaDataBasedPersistService.java index b274cbdbd33d8..212ff55968af8 100644 --- a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/MetaDataBasedPersistService.java +++ b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/MetaDataBasedPersistService.java @@ -20,7 +20,7 @@ import org.apache.shardingsphere.infra.config.database.DatabaseConfiguration; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.rule.ShardingSphereRule; import org.apache.shardingsphere.metadata.persist.data.ShardingSphereDataBasedPersistService; import org.apache.shardingsphere.metadata.persist.service.config.database.DatabaseBasedPersistService; @@ -52,7 +52,7 @@ public interface MetaDataBasedPersistService { * * @return persist service */ - DatabaseBasedPersistService> getDataSourceUnitService(); + DatabaseBasedPersistService> getDataSourceUnitService(); /** * Get database meta data service. 
diff --git a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/MetaDataPersistService.java b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/MetaDataPersistService.java index ce68f84ca4267..665e7f3c654a5 100644 --- a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/MetaDataPersistService.java +++ b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/MetaDataPersistService.java @@ -23,8 +23,8 @@ import org.apache.shardingsphere.infra.config.rule.decorator.RuleConfigurationDecorator; import org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolDestroyer; import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.datasource.storage.StorageNode; import org.apache.shardingsphere.infra.rule.ShardingSphereRule; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; @@ -93,11 +93,11 @@ public void persistGlobalRuleConfiguration(final Collection g @Override public void persistConfigurations(final String databaseName, final DatabaseConfiguration databaseConfigs, final Map dataSources, final Collection rules) { - Map dataSourcePropertiesMap = getDataSourcePropertiesMap(databaseConfigs); - if (dataSourcePropertiesMap.isEmpty() && databaseConfigs.getRuleConfigurations().isEmpty()) { + Map propsMap = getDataSourcePoolPropertiesMap(databaseConfigs); + if (propsMap.isEmpty() && databaseConfigs.getRuleConfigurations().isEmpty()) { databaseMetaDataService.addDatabase(databaseName); } else { - 
dataSourceUnitService.persist(databaseName, dataSourcePropertiesMap); + dataSourceUnitService.persist(databaseName, propsMap); databaseRulePersistService.persist(databaseName, decorateRuleConfigs(databaseName, dataSources, rules)); } } @@ -113,28 +113,28 @@ private Collection decorateRuleConfigs(final String databaseN return result; } - private Map getDataSourcePropertiesMap(final DatabaseConfiguration databaseConfigs) { - if (!databaseConfigs.getDataSources().isEmpty() && databaseConfigs.getDataSourcePropsMap().isEmpty()) { - return getDataSourcePropertiesMap(databaseConfigs.getStorageResource().getStorageNodeDataSources()); + private Map getDataSourcePoolPropertiesMap(final DatabaseConfiguration databaseConfigs) { + if (!databaseConfigs.getDataSources().isEmpty() && databaseConfigs.getDataSourcePoolPropertiesMap().isEmpty()) { + return getDataSourcePoolPropertiesMap(databaseConfigs.getStorageResource().getStorageNodeDataSources()); } - return databaseConfigs.getDataSourcePropsMap(); + return databaseConfigs.getDataSourcePoolPropertiesMap(); } - private Map getDataSourcePropertiesMap(final Map storageNodeDataSources) { - Map result = new LinkedHashMap<>(storageNodeDataSources.size(), 1F); + private Map getDataSourcePoolPropertiesMap(final Map storageNodeDataSources) { + Map result = new LinkedHashMap<>(storageNodeDataSources.size(), 1F); for (Entry entry : storageNodeDataSources.entrySet()) { - result.put(entry.getKey().getName(), DataSourcePropertiesCreator.create(entry.getValue())); + result.put(entry.getKey().getName(), DataSourcePoolPropertiesCreator.create(entry.getValue())); } return result; } @Override public Map getEffectiveDataSources(final String databaseName, final Map databaseConfigs) { - Map persistedDataPropsMap = dataSourceUnitService.load(databaseName); + Map persistedDataPropsMap = dataSourceUnitService.load(databaseName); if (databaseConfigs.containsKey(databaseName) && !databaseConfigs.get(databaseName).getDataSources().isEmpty()) { 
databaseConfigs.get(databaseName).getStorageResource().getStorageNodeDataSources().values().forEach(each -> new DataSourcePoolDestroyer(each).asyncDestroy()); } return persistedDataPropsMap.entrySet().stream().collect(Collectors.toMap(Entry::getKey, - entry -> DataSourcePropertiesCreator.createConfiguration(entry.getValue()), (key, value) -> value, LinkedHashMap::new)); + entry -> DataSourcePoolPropertiesCreator.createConfiguration(entry.getValue()), (key, value) -> value, LinkedHashMap::new)); } } diff --git a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/NewMetaDataPersistService.java b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/NewMetaDataPersistService.java index 581836429466b..b921183f3616d 100644 --- a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/NewMetaDataPersistService.java +++ b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/NewMetaDataPersistService.java @@ -23,8 +23,8 @@ import org.apache.shardingsphere.infra.config.rule.decorator.RuleConfigurationDecorator; import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; import org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolDestroyer; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.datasource.storage.StorageNode; import org.apache.shardingsphere.infra.rule.ShardingSphereRule; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; @@ -98,13 +98,12 @@ public void persistGlobalRuleConfiguration(final Collection g } @Override - public void persistConfigurations(final String 
databaseName, final DatabaseConfiguration databaseConfigs, - final Map dataSources, final Collection rules) { - Map dataSourcePropertiesMap = getDataSourcePropertiesMap(databaseConfigs); - if (dataSourcePropertiesMap.isEmpty() && databaseConfigs.getRuleConfigurations().isEmpty()) { + public void persistConfigurations(final String databaseName, final DatabaseConfiguration databaseConfigs, final Map dataSources, final Collection rules) { + Map propsMap = getDataSourcePoolPropertiesMap(databaseConfigs); + if (propsMap.isEmpty() && databaseConfigs.getRuleConfigurations().isEmpty()) { databaseMetaDataService.addDatabase(databaseName); } else { - dataSourceUnitService.persist(databaseName, dataSourcePropertiesMap); + dataSourceUnitService.persist(databaseName, propsMap); databaseRulePersistService.persist(databaseName, decorateRuleConfigs(databaseName, dataSources, rules)); } } @@ -120,17 +119,17 @@ private Collection decorateRuleConfigs(final String databaseN return result; } - private Map getDataSourcePropertiesMap(final DatabaseConfiguration databaseConfigs) { - if (!databaseConfigs.getDataSources().isEmpty() && databaseConfigs.getDataSourcePropsMap().isEmpty()) { - return getDataSourcePropertiesMap(databaseConfigs.getStorageResource().getStorageNodeDataSources()); + private Map getDataSourcePoolPropertiesMap(final DatabaseConfiguration databaseConfigs) { + if (!databaseConfigs.getDataSources().isEmpty() && databaseConfigs.getDataSourcePoolPropertiesMap().isEmpty()) { + return getDataSourcePoolPropertiesMap(databaseConfigs.getStorageResource().getStorageNodeDataSources()); } - return databaseConfigs.getDataSourcePropsMap(); + return databaseConfigs.getDataSourcePoolPropertiesMap(); } - private Map getDataSourcePropertiesMap(final Map storageNodeDataSources) { - Map result = new LinkedHashMap<>(storageNodeDataSources.size(), 1F); + private Map getDataSourcePoolPropertiesMap(final Map storageNodeDataSources) { + Map result = new 
LinkedHashMap<>(storageNodeDataSources.size(), 1F); for (Entry entry : storageNodeDataSources.entrySet()) { - result.put(entry.getKey().getName(), DataSourcePropertiesCreator.create(entry.getValue())); + result.put(entry.getKey().getName(), DataSourcePoolPropertiesCreator.create(entry.getValue())); } return result; } @@ -144,11 +143,11 @@ private Map getDataSourcePropertiesMap(final Map getEffectiveDataSources(final String databaseName, final Map databaseConfigs) { - Map persistedDataPropsMap = dataSourceUnitService.load(databaseName); + Map persistedDataPropsMap = dataSourceUnitService.load(databaseName); if (databaseConfigs.containsKey(databaseName) && !databaseConfigs.get(databaseName).getDataSources().isEmpty()) { databaseConfigs.get(databaseName).getStorageResource().getStorageNodeDataSources().values().forEach(each -> new DataSourcePoolDestroyer(each).asyncDestroy()); } return persistedDataPropsMap.entrySet().stream().collect(Collectors.toMap(Entry::getKey, - entry -> DataSourcePropertiesCreator.createConfiguration(entry.getValue()), (key, value) -> value, LinkedHashMap::new)); + entry -> DataSourcePoolPropertiesCreator.createConfiguration(entry.getValue()), (key, value) -> value, LinkedHashMap::new)); } } diff --git a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/DatabaseBasedPersistService.java b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/DatabaseBasedPersistService.java index 4bb944f83f416..883d80fdf5778 100644 --- a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/DatabaseBasedPersistService.java +++ b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/DatabaseBasedPersistService.java @@ -17,7 +17,7 @@ package org.apache.shardingsphere.metadata.persist.service.config.database; -import 
org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.metadata.version.MetaDataVersion; import java.util.Collection; @@ -92,8 +92,8 @@ default Collection persistConfig(String databaseName, T configs * Append data source properties map. * * @param databaseName database name - * @param toBeAppendedDataSourcePropsMap data source properties map to be appended + * @param toBeAppendedPropsMap data source pool properties map to be appended */ - default void append(final String databaseName, final Map toBeAppendedDataSourcePropsMap) { + default void append(final String databaseName, final Map toBeAppendedPropsMap) { } } diff --git a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/DataSourceNodePersistService.java b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/DataSourceNodePersistService.java index 95257cac02ccb..d2398c1e28c6b 100644 --- a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/DataSourceNodePersistService.java +++ b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/DataSourceNodePersistService.java @@ -19,7 +19,7 @@ import com.google.common.base.Strings; import lombok.RequiredArgsConstructor; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.infra.yaml.config.swapper.resource.YamlDataSourceConfigurationSwapper; import org.apache.shardingsphere.metadata.persist.node.DatabaseMetaDataNode; @@ -35,14 +35,14 @@ * Data source node persist service. 
*/ @RequiredArgsConstructor -public final class DataSourceNodePersistService implements DatabaseBasedPersistService> { +public final class DataSourceNodePersistService implements DatabaseBasedPersistService> { private static final String DEFAULT_VERSION = "0"; private final PersistRepository repository; @Override - public void persist(final String databaseName, final Map dataSourceConfigs) { + public void persist(final String databaseName, final Map dataSourceConfigs) { if (Strings.isNullOrEmpty(getDatabaseActiveVersion(databaseName))) { repository.persist(DatabaseMetaDataNode.getActiveVersionPath(databaseName), DEFAULT_VERSION); } @@ -50,14 +50,14 @@ public void persist(final String databaseName, final Map> swapYamlDataSourceConfiguration(final Map dataSourcePropsMap) { - return dataSourcePropsMap.entrySet().stream() + private Map> swapYamlDataSourceConfiguration(final Map propsMap) { + return propsMap.entrySet().stream() .collect(Collectors.toMap(Entry::getKey, entry -> new YamlDataSourceConfigurationSwapper().swapToMap(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); } @Override - public Map load(final String databaseName) { - return isExisted(databaseName) ? getDataSourceProperties(repository.getDirectly( + public Map load(final String databaseName) { + return isExisted(databaseName) ? 
getDataSourcePoolProperties(repository.getDirectly( DatabaseMetaDataNode.getMetaDataDataSourceNodesPath(databaseName, getDatabaseActiveVersion(databaseName)))) : new LinkedHashMap<>(); } @@ -67,13 +67,13 @@ private boolean isExisted(final String databaseName) { } @SuppressWarnings("unchecked") - private Map getDataSourceProperties(final String yamlContent) { + private Map getDataSourcePoolProperties(final String yamlContent) { Map> yamlDataSources = YamlEngine.unmarshal(yamlContent, Map.class); if (yamlDataSources.isEmpty()) { return new LinkedHashMap<>(); } - Map result = new LinkedHashMap<>(yamlDataSources.size()); - yamlDataSources.forEach((key, value) -> result.put(key, new YamlDataSourceConfigurationSwapper().swapToDataSourceProperties(value))); + Map result = new LinkedHashMap<>(yamlDataSources.size()); + yamlDataSources.forEach((key, value) -> result.put(key, new YamlDataSourceConfigurationSwapper().swapToDataSourcePoolProperties(value))); return result; } @@ -81,12 +81,12 @@ private Map getDataSourceProperties(final String y * Append data source properties map. 
* * @param databaseName database name - * @param toBeAppendedDataSourcePropsMap data source properties map to be appended + * @param toBeAppendedPropsMap data source pool properties map to be appended */ @Override - public void append(final String databaseName, final Map toBeAppendedDataSourcePropsMap) { - Map dataSourceConfigs = load(databaseName); - dataSourceConfigs.putAll(toBeAppendedDataSourcePropsMap); + public void append(final String databaseName, final Map toBeAppendedPropsMap) { + Map dataSourceConfigs = load(databaseName); + dataSourceConfigs.putAll(toBeAppendedPropsMap); persist(databaseName, dataSourceConfigs); } diff --git a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/DataSourceUnitPersistService.java b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/DataSourceUnitPersistService.java index b38408abbc1ed..bc811bc34daf8 100644 --- a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/DataSourceUnitPersistService.java +++ b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/DataSourceUnitPersistService.java @@ -19,7 +19,7 @@ import com.google.common.base.Strings; import lombok.RequiredArgsConstructor; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.infra.yaml.config.swapper.resource.YamlDataSourceConfigurationSwapper; import org.apache.shardingsphere.metadata.persist.node.DatabaseMetaDataNode; @@ -35,14 +35,14 @@ * Data source unit persist service.
*/ @RequiredArgsConstructor -public final class DataSourceUnitPersistService implements DatabaseBasedPersistService> { +public final class DataSourceUnitPersistService implements DatabaseBasedPersistService> { private static final String DEFAULT_VERSION = "0"; private final PersistRepository repository; @Override - public void persist(final String databaseName, final Map dataSourceConfigs) { + public void persist(final String databaseName, final Map dataSourceConfigs) { if (Strings.isNullOrEmpty(getDatabaseActiveVersion(databaseName))) { repository.persist(DatabaseMetaDataNode.getActiveVersionPath(databaseName), DEFAULT_VERSION); } @@ -50,14 +50,14 @@ public void persist(final String databaseName, final Map> swapYamlDataSourceConfiguration(final Map dataSourcePropsMap) { - return dataSourcePropsMap.entrySet().stream() + private Map> swapYamlDataSourceConfiguration(final Map propsMap) { + return propsMap.entrySet().stream() .collect(Collectors.toMap(Entry::getKey, entry -> new YamlDataSourceConfigurationSwapper().swapToMap(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); } @Override - public Map load(final String databaseName) { - return isExisted(databaseName) ? getDataSourceProperties(repository.getDirectly( + public Map load(final String databaseName) { + return isExisted(databaseName) ? 
getDataSourcePoolProperties(repository.getDirectly( DatabaseMetaDataNode.getMetaDataDataSourceUnitsPath(databaseName, getDatabaseActiveVersion(databaseName)))) : new LinkedHashMap<>(); } @@ -67,13 +67,13 @@ private boolean isExisted(final String databaseName) { } @SuppressWarnings("unchecked") - private Map getDataSourceProperties(final String yamlContent) { + private Map getDataSourcePoolProperties(final String yamlContent) { Map> yamlDataSources = YamlEngine.unmarshal(yamlContent, Map.class); if (yamlDataSources.isEmpty()) { return new LinkedHashMap<>(); } - Map result = new LinkedHashMap<>(yamlDataSources.size()); - yamlDataSources.forEach((key, value) -> result.put(key, new YamlDataSourceConfigurationSwapper().swapToDataSourceProperties(value))); + Map result = new LinkedHashMap<>(yamlDataSources.size()); + yamlDataSources.forEach((key, value) -> result.put(key, new YamlDataSourceConfigurationSwapper().swapToDataSourcePoolProperties(value))); return result; } @@ -81,12 +81,12 @@ private Map getDataSourceProperties(final String y * Append data source properties map. 
* * @param databaseName database name - * @param toBeAppendedDataSourcePropsMap data source properties map to be appended + * @param toBeAppendedPropsMap data source properties map to be appended */ @Override - public void append(final String databaseName, final Map toBeAppendedDataSourcePropsMap) { - Map dataSourceConfigs = load(databaseName); - dataSourceConfigs.putAll(toBeAppendedDataSourcePropsMap); + public void append(final String databaseName, final Map toBeAppendedPropsMap) { + Map dataSourceConfigs = load(databaseName); + dataSourceConfigs.putAll(toBeAppendedPropsMap); persist(databaseName, dataSourceConfigs); } diff --git a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/NewDataSourceNodePersistService.java b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/NewDataSourceNodePersistService.java index d411f4a628ae1..c78bedaa56404 100644 --- a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/NewDataSourceNodePersistService.java +++ b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/NewDataSourceNodePersistService.java @@ -19,7 +19,7 @@ import com.google.common.base.Strings; import lombok.RequiredArgsConstructor; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.metadata.version.MetaDataVersion; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.infra.yaml.config.swapper.resource.YamlDataSourceConfigurationSwapper; @@ -39,15 +39,15 @@ * New Data source node persist service. 
*/ @RequiredArgsConstructor -public final class NewDataSourceNodePersistService implements DatabaseBasedPersistService> { +public final class NewDataSourceNodePersistService implements DatabaseBasedPersistService> { private static final String DEFAULT_VERSION = "0"; private final PersistRepository repository; @Override - public void persist(final String databaseName, final Map dataSourceConfigs) { - for (Entry entry : dataSourceConfigs.entrySet()) { + public void persist(final String databaseName, final Map dataSourceConfigs) { + for (Entry entry : dataSourceConfigs.entrySet()) { String activeVersion = getDataSourceActiveVersion(databaseName, entry.getKey()); List versions = repository.getChildrenKeys(NewDatabaseMetaDataNode.getDataSourceNodeVersionsNode(databaseName, entry.getKey())); repository.persist(NewDatabaseMetaDataNode.getDataSourceNodeWithVersion(databaseName, entry.getKey(), versions.isEmpty() @@ -60,16 +60,16 @@ public void persist(final String databaseName, final Map dataSourceConfigs) { - for (Entry entry : dataSourceConfigs.entrySet()) { + public void delete(final String databaseName, final Map dataSourceConfigs) { + for (Entry entry : dataSourceConfigs.entrySet()) { repository.delete(NewDatabaseMetaDataNode.getDataSourceNode(databaseName, entry.getKey())); } } @Override - public Collection persistConfig(final String databaseName, final Map dataSourceConfigs) { + public Collection persistConfig(final String databaseName, final Map dataSourceConfigs) { Collection result = new LinkedList<>(); - for (Entry entry : dataSourceConfigs.entrySet()) { + for (Entry entry : dataSourceConfigs.entrySet()) { List versions = repository.getChildrenKeys(NewDatabaseMetaDataNode.getDataSourceNodeVersionsNode(databaseName, entry.getKey())); String nextActiveVersion = versions.isEmpty() ? 
DEFAULT_VERSION : String.valueOf(Integer.parseInt(versions.get(0)) + 1); repository.persist(NewDatabaseMetaDataNode.getDataSourceNodeWithVersion(databaseName, entry.getKey(), nextActiveVersion), @@ -85,12 +85,12 @@ public Collection persistConfig(final String databaseName, fina @SuppressWarnings("unchecked") @Override - public Map load(final String databaseName) { - Map result = new LinkedHashMap<>(); + public Map load(final String databaseName) { + Map result = new LinkedHashMap<>(); for (String each : repository.getChildrenKeys(NewDatabaseMetaDataNode.getDataSourceNodesNode(databaseName))) { String dataSourceValue = repository.getDirectly(NewDatabaseMetaDataNode.getDataSourceNodeWithVersion(databaseName, each, getDataSourceActiveVersion(databaseName, each))); if (!Strings.isNullOrEmpty(dataSourceValue)) { - result.put(each, new YamlDataSourceConfigurationSwapper().swapToDataSourceProperties(YamlEngine.unmarshal(dataSourceValue, Map.class))); + result.put(each, new YamlDataSourceConfigurationSwapper().swapToDataSourcePoolProperties(YamlEngine.unmarshal(dataSourceValue, Map.class))); } } return result; @@ -98,18 +98,18 @@ public Map load(final String databaseName) { @SuppressWarnings("unchecked") @Override - public Map load(final String databaseName, final String name) { - Map result = new LinkedHashMap<>(); + public Map load(final String databaseName, final String name) { + Map result = new LinkedHashMap<>(); String dataSourceValue = repository.getDirectly(NewDatabaseMetaDataNode.getDataSourceNodeWithVersion(databaseName, name, getDataSourceActiveVersion(databaseName, name))); if (!Strings.isNullOrEmpty(dataSourceValue)) { - result.put(name, new YamlDataSourceConfigurationSwapper().swapToDataSourceProperties(YamlEngine.unmarshal(dataSourceValue, Map.class))); + result.put(name, new YamlDataSourceConfigurationSwapper().swapToDataSourcePoolProperties(YamlEngine.unmarshal(dataSourceValue, Map.class))); } return result; } @Override - public void append(final String 
databaseName, final Map toBeAppendedDataSourcePropsMap) { - persist(databaseName, toBeAppendedDataSourcePropsMap); + public void append(final String databaseName, final Map toBeAppendedPropsMap) { + persist(databaseName, toBeAppendedPropsMap); } private String getDataSourceActiveVersion(final String databaseName, final String dataSourceName) { diff --git a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/NewDataSourceUnitPersistService.java b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/NewDataSourceUnitPersistService.java index 4f4c21e7d179b..3fcd9daa7f668 100644 --- a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/NewDataSourceUnitPersistService.java +++ b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/NewDataSourceUnitPersistService.java @@ -19,7 +19,7 @@ import com.google.common.base.Strings; import lombok.RequiredArgsConstructor; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.metadata.version.MetaDataVersion; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.infra.yaml.config.swapper.resource.YamlDataSourceConfigurationSwapper; @@ -39,15 +39,15 @@ * New Data source unit persist service. 
*/ @RequiredArgsConstructor -public final class NewDataSourceUnitPersistService implements DatabaseBasedPersistService> { +public final class NewDataSourceUnitPersistService implements DatabaseBasedPersistService> { private static final String DEFAULT_VERSION = "0"; private final PersistRepository repository; @Override - public void persist(final String databaseName, final Map dataSourceConfigs) { - for (Entry entry : dataSourceConfigs.entrySet()) { + public void persist(final String databaseName, final Map dataSourceConfigs) { + for (Entry entry : dataSourceConfigs.entrySet()) { String activeVersion = getDataSourceActiveVersion(databaseName, entry.getKey()); List versions = repository.getChildrenKeys(NewDatabaseMetaDataNode.getDataSourceUnitVersionsNode(databaseName, entry.getKey())); repository.persist(NewDatabaseMetaDataNode.getDataSourceUnitNodeWithVersion(databaseName, entry.getKey(), versions.isEmpty() @@ -60,16 +60,16 @@ public void persist(final String databaseName, final Map dataSourceConfigs) { - for (Entry entry : dataSourceConfigs.entrySet()) { + public void delete(final String databaseName, final Map dataSourceConfigs) { + for (Entry entry : dataSourceConfigs.entrySet()) { repository.delete(NewDatabaseMetaDataNode.getDataSourceUnitNode(databaseName, entry.getKey())); } } @Override - public Collection persistConfig(final String databaseName, final Map dataSourceConfigs) { + public Collection persistConfig(final String databaseName, final Map dataSourceConfigs) { Collection result = new LinkedList<>(); - for (Entry entry : dataSourceConfigs.entrySet()) { + for (Entry entry : dataSourceConfigs.entrySet()) { List versions = repository.getChildrenKeys(NewDatabaseMetaDataNode.getDataSourceUnitVersionsNode(databaseName, entry.getKey())); String nextActiveVersion = versions.isEmpty() ? 
DEFAULT_VERSION : String.valueOf(Integer.parseInt(versions.get(0)) + 1); repository.persist(NewDatabaseMetaDataNode.getDataSourceUnitNodeWithVersion(databaseName, entry.getKey(), nextActiveVersion), @@ -85,12 +85,12 @@ public Collection persistConfig(final String databaseName, fina @SuppressWarnings("unchecked") @Override - public Map load(final String databaseName) { - Map result = new LinkedHashMap<>(); + public Map load(final String databaseName) { + Map result = new LinkedHashMap<>(); for (String each : repository.getChildrenKeys(NewDatabaseMetaDataNode.getDataSourceUnitsNode(databaseName))) { String dataSourceValue = repository.getDirectly(NewDatabaseMetaDataNode.getDataSourceUnitNodeWithVersion(databaseName, each, getDataSourceActiveVersion(databaseName, each))); if (!Strings.isNullOrEmpty(dataSourceValue)) { - result.put(each, new YamlDataSourceConfigurationSwapper().swapToDataSourceProperties(YamlEngine.unmarshal(dataSourceValue, Map.class))); + result.put(each, new YamlDataSourceConfigurationSwapper().swapToDataSourcePoolProperties(YamlEngine.unmarshal(dataSourceValue, Map.class))); } } return result; @@ -98,18 +98,18 @@ public Map load(final String databaseName) { @SuppressWarnings("unchecked") @Override - public Map load(final String databaseName, final String name) { - Map result = new LinkedHashMap<>(); + public Map load(final String databaseName, final String name) { + Map result = new LinkedHashMap<>(); String dataSourceValue = repository.getDirectly(NewDatabaseMetaDataNode.getDataSourceUnitNodeWithVersion(databaseName, name, getDataSourceActiveVersion(databaseName, name))); if (!Strings.isNullOrEmpty(dataSourceValue)) { - result.put(name, new YamlDataSourceConfigurationSwapper().swapToDataSourceProperties(YamlEngine.unmarshal(dataSourceValue, Map.class))); + result.put(name, new YamlDataSourceConfigurationSwapper().swapToDataSourcePoolProperties(YamlEngine.unmarshal(dataSourceValue, Map.class))); } return result; } @Override - public void 
append(final String databaseName, final Map toBeAppendedDataSourcePropsMap) { - persist(databaseName, toBeAppendedDataSourcePropsMap); + public void append(final String databaseName, final Map toBeAppendedPropsMap) { + persist(databaseName, toBeAppendedPropsMap); } private String getDataSourceActiveVersion(final String databaseName, final String dataSourceName) { diff --git a/kernel/metadata/core/src/test/java/org/apache/shardingsphere/metadata/persist/service/config/database/DataSourceUnitPersistServiceTest.java b/kernel/metadata/core/src/test/java/org/apache/shardingsphere/metadata/persist/service/config/database/DataSourceUnitPersistServiceTest.java index f32089075b918..c39b1e9a1b611 100644 --- a/kernel/metadata/core/src/test/java/org/apache/shardingsphere/metadata/persist/service/config/database/DataSourceUnitPersistServiceTest.java +++ b/kernel/metadata/core/src/test/java/org/apache/shardingsphere/metadata/persist/service/config/database/DataSourceUnitPersistServiceTest.java @@ -18,8 +18,8 @@ package org.apache.shardingsphere.metadata.persist.service.config.database; import lombok.SneakyThrows; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.metadata.persist.service.config.database.datasource.DataSourceUnitPersistService; import org.apache.shardingsphere.mode.spi.PersistRepository; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; @@ -54,10 +54,10 @@ class DataSourceUnitPersistServiceTest { void assertLoad() { when(repository.getDirectly("/metadata/foo_db/active_version")).thenReturn("0"); 
when(repository.getDirectly("/metadata/foo_db/versions/0/data_sources/units")).thenReturn(readDataSourceYaml("yaml/persist/data-source.yaml")); - Map actual = new DataSourceUnitPersistService(repository).load("foo_db"); + Map actual = new DataSourceUnitPersistService(repository).load("foo_db"); assertThat(actual.size(), is(2)); - assertDataSourceProperties(actual.get("ds_0"), DataSourcePropertiesCreator.create(createDataSource("ds_0"))); - assertDataSourceProperties(actual.get("ds_1"), DataSourcePropertiesCreator.create(createDataSource("ds_1"))); + assertDataSourcePoolProperties(actual.get("ds_0"), DataSourcePoolPropertiesCreator.create(createDataSource("ds_0"))); + assertDataSourcePoolProperties(actual.get("ds_1"), DataSourcePoolPropertiesCreator.create(createDataSource("ds_1"))); } @SneakyThrows({IOException.class, URISyntaxException.class}) @@ -66,8 +66,8 @@ private String readDataSourceYaml(final String path) { .stream().filter(each -> !"".equals(each.trim()) && !each.startsWith("#")).map(each -> each + System.lineSeparator()).collect(Collectors.joining()); } - private void assertDataSourceProperties(final DataSourceProperties actual, final DataSourceProperties expected) { - assertThat(actual.getDataSourceClassName(), is(expected.getDataSourceClassName())); + private void assertDataSourcePoolProperties(final DataSourcePoolProperties actual, final DataSourcePoolProperties expected) { + assertThat(actual.getPoolClassName(), is(expected.getPoolClassName())); assertThat(actual.getAllLocalProperties().get("url"), is(expected.getAllLocalProperties().get("url"))); assertThat(actual.getAllLocalProperties().get("username"), is(expected.getAllLocalProperties().get("username"))); assertThat(actual.getAllLocalProperties().get("password"), is(expected.getAllLocalProperties().get("password"))); @@ -77,14 +77,14 @@ private void assertDataSourceProperties(final DataSourceProperties actual, final @Test void assertLoadWithoutPath() { 
when(repository.getDirectly("/metadata/foo_db/active_version")).thenReturn("0"); - Map actual = new DataSourceUnitPersistService(repository).load("foo_db"); + Map actual = new DataSourceUnitPersistService(repository).load("foo_db"); assertTrue(actual.isEmpty()); } @Test void assertAppend() { when(repository.getDirectly("/metadata/foo_db/active_version")).thenReturn("0"); - new DataSourceUnitPersistService(repository).append("foo_db", Collections.singletonMap("foo_ds", DataSourcePropertiesCreator.create(createDataSource("foo_ds")))); + new DataSourceUnitPersistService(repository).append("foo_db", Collections.singletonMap("foo_ds", DataSourcePoolPropertiesCreator.create(createDataSource("foo_ds")))); String expected = readDataSourceYaml("yaml/persist/data-source-foo.yaml"); verify(repository).persist("/metadata/foo_db/versions/0/data_sources/units", expected); } diff --git a/kernel/time-service/type/database/src/main/java/org/apache/shardingsphere/timeservice/type/database/DatabaseTimestampService.java b/kernel/time-service/type/database/src/main/java/org/apache/shardingsphere/timeservice/type/database/DatabaseTimestampService.java index 88fd42269a0a0..195940505879b 100644 --- a/kernel/time-service/type/database/src/main/java/org/apache/shardingsphere/timeservice/type/database/DatabaseTimestampService.java +++ b/kernel/time-service/type/database/src/main/java/org/apache/shardingsphere/timeservice/type/database/DatabaseTimestampService.java @@ -48,7 +48,7 @@ public final class DatabaseTimestampService implements TimestampService { @Override public void init(final Properties props) { - dataSource = DataSourcePoolCreator.create(new YamlDataSourceConfigurationSwapper().swapToDataSourceProperties( + dataSource = DataSourcePoolCreator.create(new YamlDataSourceConfigurationSwapper().swapToDataSourcePoolProperties( props.entrySet().stream().collect(Collectors.toMap(entry -> entry.getKey().toString(), Entry::getValue, (key, value) -> value)))); storageType = 
DatabaseTypeEngine.getStorageType(Collections.singleton(dataSource)); } diff --git a/kernel/transaction/type/xa/core/src/main/java/org/apache/shardingsphere/transaction/xa/jta/datasource/swapper/DataSourceSwapper.java b/kernel/transaction/type/xa/core/src/main/java/org/apache/shardingsphere/transaction/xa/jta/datasource/swapper/DataSourceSwapper.java index 56e2c0cf91512..06dcb7a8b82e2 100644 --- a/kernel/transaction/type/xa/core/src/main/java/org/apache/shardingsphere/transaction/xa/jta/datasource/swapper/DataSourceSwapper.java +++ b/kernel/transaction/type/xa/core/src/main/java/org/apache/shardingsphere/transaction/xa/jta/datasource/swapper/DataSourceSwapper.java @@ -21,7 +21,7 @@ import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; import org.apache.shardingsphere.infra.datasource.CatalogSwitchableDataSource; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.transaction.xa.jta.datasource.properties.XADataSourceDefinition; import org.apache.shardingsphere.transaction.xa.jta.exception.XADataSourceInitializeException; @@ -78,7 +78,7 @@ private XADataSource loadXADataSource(final String xaDataSourceClassName) throws private Map getDatabaseAccessConfiguration(final DataSource dataSource) { Map result = new HashMap<>(3, 1F); - Map standardProps = DataSourcePropertiesCreator.create( + Map standardProps = DataSourcePoolPropertiesCreator.create( dataSource instanceof CatalogSwitchableDataSource ? ((CatalogSwitchableDataSource) dataSource).getDataSource() : dataSource).getAllStandardProperties(); result.put("url", dataSource instanceof CatalogSwitchableDataSource ? 
((CatalogSwitchableDataSource) dataSource).getUrl() : standardProps.get("url")); result.put("user", standardProps.get("username")); diff --git a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/ContextManager.java b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/ContextManager.java index b770350e28b15..0347a756402df 100644 --- a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/ContextManager.java +++ b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/ContextManager.java @@ -21,7 +21,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.config.props.ConfigurationPropertyKey; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.executor.kernel.ExecutorEngine; import org.apache.shardingsphere.infra.instance.InstanceContext; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; @@ -117,8 +117,8 @@ public void reloadDatabaseMetaData(final String databaseName) { try { ShardingSphereDatabase database = metaDataContexts.get().getMetaData().getDatabase(databaseName); ResourceMetaData currentResourceMetaData = database.getResourceMetaData(); - Map dataSourceProps = metaDataContexts.get().getPersistService().getDataSourceUnitService().load(databaseName); - SwitchingResource switchingResource = new ResourceSwitchManager().createByAlterDataSourceProps(currentResourceMetaData, dataSourceProps); + Map props = metaDataContexts.get().getPersistService().getDataSourceUnitService().load(databaseName); + SwitchingResource switchingResource = new ResourceSwitchManager().createByAlterDataSourcePoolProperties(currentResourceMetaData, props); metaDataContexts.get().getMetaData().getDatabases().putAll(configurationContextManager.renewDatabase(database, 
switchingResource)); MetaDataContexts reloadedMetaDataContexts = createMetaDataContexts(databaseName, switchingResource); deletedSchemaNames(databaseName, reloadedMetaDataContexts.getMetaData().getDatabase(databaseName), database); diff --git a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/context/ConfigurationContextManager.java b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/context/ConfigurationContextManager.java index 588cac79f8c33..27e93294fbf1d 100644 --- a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/context/ConfigurationContextManager.java +++ b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/context/ConfigurationContextManager.java @@ -24,7 +24,7 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; import org.apache.shardingsphere.infra.config.rule.scope.DatabaseRuleConfiguration; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.datasource.storage.StorageNode; import org.apache.shardingsphere.infra.datasource.storage.StorageResource; import org.apache.shardingsphere.infra.datasource.storage.StorageUnitNodeMapper; @@ -74,15 +74,15 @@ public final class ConfigurationContextManager { * Register storage unit. 
* * @param databaseName database name - * @param dataSourceProps data source properties + * @param propsMap data source pool properties map */ @SuppressWarnings("rawtypes") - public synchronized void registerStorageUnit(final String databaseName, final Map dataSourceProps) { + public synchronized void registerStorageUnit(final String databaseName, final Map propsMap) { try { Collection staleResourceHeldRules = getStaleResourceHeldRules(databaseName); staleResourceHeldRules.forEach(ResourceHeldRule::closeStaleResource); SwitchingResource switchingResource = - new NewResourceSwitchManager().registerStorageUnit(metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData(), dataSourceProps); + new NewResourceSwitchManager().registerStorageUnit(metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData(), propsMap); buildNewMetaDataContext(databaseName, switchingResource); } catch (final SQLException ex) { log.error("Alter database: {} register storage unit failed", databaseName, ex); @@ -93,15 +93,15 @@ public synchronized void registerStorageUnit(final String databaseName, final Ma * Alter storage unit. 
* * @param databaseName database name - * @param dataSourceProps data source properties + * @param propsMap data source pool properties map */ @SuppressWarnings("rawtypes") - public synchronized void alterStorageUnit(final String databaseName, final Map dataSourceProps) { + public synchronized void alterStorageUnit(final String databaseName, final Map propsMap) { try { Collection staleResourceHeldRules = getStaleResourceHeldRules(databaseName); staleResourceHeldRules.forEach(ResourceHeldRule::closeStaleResource); SwitchingResource switchingResource = - new NewResourceSwitchManager().alterStorageUnit(metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData(), dataSourceProps); + new NewResourceSwitchManager().alterStorageUnit(metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData(), propsMap); buildNewMetaDataContext(databaseName, switchingResource); } catch (final SQLException ex) { log.error("Alter database: {} register storage unit failed", databaseName, ex); @@ -222,15 +222,15 @@ private MetaDataContexts createMetaDataContextsByAlterRule(final String database * Alter data source units configuration. 
* * @param databaseName database name - * @param dataSourcePropsMap altered data source properties map + * @param propsMap altered data source pool properties map */ @SuppressWarnings("rawtypes") - public synchronized void alterDataSourceUnitsConfiguration(final String databaseName, final Map dataSourcePropsMap) { + public synchronized void alterDataSourceUnitsConfiguration(final String databaseName, final Map propsMap) { try { Collection staleResourceHeldRules = getStaleResourceHeldRules(databaseName); staleResourceHeldRules.forEach(ResourceHeldRule::closeStaleResource); SwitchingResource switchingResource = - new ResourceSwitchManager().createByAlterDataSourceProps(metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData(), dataSourcePropsMap); + new ResourceSwitchManager().createByAlterDataSourcePoolProperties(metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData(), propsMap); metaDataContexts.get().getMetaData().getDatabases().putAll(renewDatabase(metaDataContexts.get().getMetaData().getDatabase(databaseName), switchingResource)); // TODO Remove this logic when issue #22887 are finished. 
MetaDataContexts reloadMetaDataContexts = createMetaDataContexts(databaseName, false, switchingResource, null); @@ -275,7 +275,7 @@ public Map renewDatabase(final ShardingSphereDat StorageResource newStorageResource = new StorageResource(newStorageNodes, newStorageUnitNodeMappers); return Collections.singletonMap(database.getName().toLowerCase(), new ShardingSphereDatabase(database.getName(), database.getProtocolType(), - new ResourceMetaData(database.getName(), newStorageResource, database.getResourceMetaData().getStorageUnitMetaData().getDataSourcePropsMap()), + new ResourceMetaData(database.getName(), newStorageResource, database.getResourceMetaData().getStorageUnitMetaData().getDataSourcePoolPropertiesMap()), database.getRuleMetaData(), database.getSchemas())); } @@ -350,7 +350,7 @@ public synchronized Map createChangedDatabases(f : ruleConfigs; StorageResource storageResource = new StorageResource(resourceMetaData.getStorageNodeDataSources(), resourceMetaData.getStorageUnitMetaData().getUnitNodeMappers()); DatabaseConfiguration toBeCreatedDatabaseConfig = new DataSourceProvidedDatabaseConfiguration( - storageResource, toBeCreatedRuleConfigs, resourceMetaData.getStorageUnitMetaData().getDataSourcePropsMap()); + storageResource, toBeCreatedRuleConfigs, resourceMetaData.getStorageUnitMetaData().getDataSourcePoolPropertiesMap()); ShardingSphereDatabase changedDatabase = createChangedDatabase(metaDataContexts.get().getMetaData().getDatabase(databaseName).getName(), internalLoadMetaData, metaDataContexts.get().getPersistService(), toBeCreatedDatabaseConfig, metaDataContexts.get().getMetaData().getProps(), instanceContext); Map result = new LinkedHashMap<>(metaDataContexts.get().getMetaData().getDatabases()); diff --git a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/NewResourceSwitchManager.java b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/NewResourceSwitchManager.java index ee2fd671c8408..db77f356d0419 100644 
--- a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/NewResourceSwitchManager.java +++ b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/NewResourceSwitchManager.java @@ -18,7 +18,7 @@ package org.apache.shardingsphere.mode.manager.switcher; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.datasource.storage.StorageNode; import org.apache.shardingsphere.infra.datasource.storage.StorageResource; import org.apache.shardingsphere.infra.datasource.storage.StorageResourceWithProperties; @@ -41,21 +41,21 @@ public final class NewResourceSwitchManager { * Register storage unit. * * @param resourceMetaData resource meta data - * @param dataSourceProps data source properties + * @param propsMap data source pool properties map * @return created switching resource */ - public SwitchingResource registerStorageUnit(final ResourceMetaData resourceMetaData, final Map dataSourceProps) { - resourceMetaData.getStorageUnitMetaData().getDataSourcePropsMap().putAll(dataSourceProps); - StorageResourceWithProperties toBeCreatedStorageResource = DataSourcePoolCreator.createStorageResourceWithoutDataSource(dataSourceProps); + public SwitchingResource registerStorageUnit(final ResourceMetaData resourceMetaData, final Map propsMap) { + resourceMetaData.getStorageUnitMetaData().getDataSourcePoolPropertiesMap().putAll(propsMap); + StorageResourceWithProperties toBeCreatedStorageResource = DataSourcePoolCreator.createStorageResourceWithoutDataSource(propsMap); return new SwitchingResource(resourceMetaData, getRegisterNewStorageResource(resourceMetaData, toBeCreatedStorageResource), - new StorageResource(Collections.emptyMap(), Collections.emptyMap()), 
resourceMetaData.getStorageUnitMetaData().getDataSourcePropsMap()); + new StorageResource(Collections.emptyMap(), Collections.emptyMap())); } private StorageResource getRegisterNewStorageResource(final ResourceMetaData resourceMetaData, final StorageResourceWithProperties toBeCreatedStorageResource) { Map storageNodes = new LinkedHashMap<>(toBeCreatedStorageResource.getStorageNodeDataSources().size(), 1F); for (StorageNode each : toBeCreatedStorageResource.getStorageNodeDataSources().keySet()) { if (!resourceMetaData.getStorageNodeDataSources().containsKey(each)) { - storageNodes.put(each, DataSourcePoolCreator.create(toBeCreatedStorageResource.getDataSourcePropertiesMap().get(each.getName()))); + storageNodes.put(each, DataSourcePoolCreator.create(toBeCreatedStorageResource.getDataSourcePoolPropertiesMap().get(each.getName()))); } } return new StorageResource(storageNodes, toBeCreatedStorageResource.getStorageUnitNodeMappers()); @@ -65,20 +65,20 @@ private StorageResource getRegisterNewStorageResource(final ResourceMetaData res * Alter storage unit. 
* * @param resourceMetaData resource meta data - * @param dataSourceProps data source properties + * @param props data source pool properties * @return created switching resource */ - public SwitchingResource alterStorageUnit(final ResourceMetaData resourceMetaData, final Map dataSourceProps) { - resourceMetaData.getStorageUnitMetaData().getDataSourcePropsMap().putAll(dataSourceProps); - StorageResourceWithProperties toBeAlteredStorageResource = DataSourcePoolCreator.createStorageResourceWithoutDataSource(dataSourceProps); + public SwitchingResource alterStorageUnit(final ResourceMetaData resourceMetaData, final Map props) { + resourceMetaData.getStorageUnitMetaData().getDataSourcePoolPropertiesMap().putAll(props); + StorageResourceWithProperties toBeAlteredStorageResource = DataSourcePoolCreator.createStorageResourceWithoutDataSource(props); return new SwitchingResource(resourceMetaData, getAlterNewStorageResource(toBeAlteredStorageResource), - getStaleStorageResource(resourceMetaData, toBeAlteredStorageResource), resourceMetaData.getStorageUnitMetaData().getDataSourcePropsMap()); + getStaleStorageResource(resourceMetaData, toBeAlteredStorageResource)); } private StorageResource getAlterNewStorageResource(final StorageResourceWithProperties toBeAlteredStorageResource) { Map storageNodes = new LinkedHashMap<>(toBeAlteredStorageResource.getStorageNodeDataSources().size(), 1F); for (StorageNode each : toBeAlteredStorageResource.getStorageNodeDataSources().keySet()) { - storageNodes.put(each, DataSourcePoolCreator.create(toBeAlteredStorageResource.getDataSourcePropertiesMap().get(each.getName()))); + storageNodes.put(each, DataSourcePoolCreator.create(toBeAlteredStorageResource.getDataSourcePoolPropertiesMap().get(each.getName()))); } return new StorageResource(storageNodes, toBeAlteredStorageResource.getStorageUnitNodeMappers()); } @@ -101,9 +101,9 @@ private StorageResource getStaleStorageResource(final ResourceMetaData resourceM * @return created switching resource 
*/ public SwitchingResource unregisterStorageUnit(final ResourceMetaData resourceMetaData, final String storageUnitName) { - resourceMetaData.getStorageUnitMetaData().getDataSourcePropsMap().remove(storageUnitName); + resourceMetaData.getStorageUnitMetaData().getDataSourcePoolPropertiesMap().remove(storageUnitName); return new SwitchingResource(resourceMetaData, new StorageResource(Collections.emptyMap(), Collections.emptyMap()), - getToBeRemovedStaleStorageResource(resourceMetaData, storageUnitName), resourceMetaData.getStorageUnitMetaData().getDataSourcePropsMap()); + getToBeRemovedStaleStorageResource(resourceMetaData, storageUnitName)); } private StorageResource getToBeRemovedStaleStorageResource(final ResourceMetaData resourceMetaData, final String storageUnitName) { diff --git a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/ResourceSwitchManager.java b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/ResourceSwitchManager.java index 076a4b79b8e37..e2db6de8bf5ef 100644 --- a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/ResourceSwitchManager.java +++ b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/ResourceSwitchManager.java @@ -18,8 +18,8 @@ package org.apache.shardingsphere.mode.manager.switcher; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.datasource.storage.StorageNode; import org.apache.shardingsphere.infra.datasource.storage.StorageResource; import 
org.apache.shardingsphere.infra.datasource.storage.StorageResourceWithProperties; @@ -43,67 +43,61 @@ public final class ResourceSwitchManager { * Create switching resource. * * @param resourceMetaData resource meta data - * @param toBeChangedDataSourceProps to be changed data source properties map + * @param toBeChangedPropsMap to be changed data source pool properties map * @return created switching resource */ - public SwitchingResource create(final ResourceMetaData resourceMetaData, final Map toBeChangedDataSourceProps) { - resourceMetaData.getStorageUnitMetaData().getDataSourcePropsMap().putAll(toBeChangedDataSourceProps); - StorageResourceWithProperties toBeChangedStorageResource = DataSourcePoolCreator.createStorageResourceWithoutDataSource(toBeChangedDataSourceProps); - return new SwitchingResource(resourceMetaData, createNewStorageResource(resourceMetaData, toBeChangedStorageResource), - getStaleDataSources(resourceMetaData, toBeChangedStorageResource), toBeChangedDataSourceProps); + public SwitchingResource create(final ResourceMetaData resourceMetaData, final Map toBeChangedPropsMap) { + resourceMetaData.getStorageUnitMetaData().getDataSourcePoolPropertiesMap().putAll(toBeChangedPropsMap); + StorageResourceWithProperties toBeChangedStorageResource = DataSourcePoolCreator.createStorageResourceWithoutDataSource(toBeChangedPropsMap); + return new SwitchingResource(resourceMetaData, createNewStorageResource(resourceMetaData, toBeChangedStorageResource), getStaleDataSources(resourceMetaData, toBeChangedStorageResource)); } /** * Create switching resource by drop resource. 
* * @param resourceMetaData resource meta data - * @param toBeDeletedDataSourceProps to be deleted data source properties map + * @param toBeDeletedPropsMap to be deleted data source pool properties map * @return created switching resource */ - public SwitchingResource createByDropResource(final ResourceMetaData resourceMetaData, final Map toBeDeletedDataSourceProps) { - resourceMetaData.getStorageUnitMetaData().getDataSourcePropsMap().keySet().removeIf(toBeDeletedDataSourceProps::containsKey); - StorageResourceWithProperties toToBeRemovedStorageResource = DataSourcePoolCreator.createStorageResourceWithoutDataSource(toBeDeletedDataSourceProps); + public SwitchingResource createByDropResource(final ResourceMetaData resourceMetaData, final Map toBeDeletedPropsMap) { + resourceMetaData.getStorageUnitMetaData().getDataSourcePoolPropertiesMap().keySet().removeIf(toBeDeletedPropsMap::containsKey); + StorageResourceWithProperties toToBeRemovedStorageResource = DataSourcePoolCreator.createStorageResourceWithoutDataSource(toBeDeletedPropsMap); return new SwitchingResource(resourceMetaData, new StorageResource(Collections.emptyMap(), Collections.emptyMap()), - getToBeRemovedStaleDataSources(resourceMetaData, toToBeRemovedStorageResource), - getToBeReversedDataSourcePropsMap(resourceMetaData.getStorageUnitMetaData().getDataSourcePropsMap(), toBeDeletedDataSourceProps.keySet())); - } - - private Map getToBeReversedDataSourcePropsMap(final Map dataSourcePropsMap, final Collection toBeDroppedResourceNames) { - return dataSourcePropsMap.entrySet().stream().filter(entry -> !toBeDroppedResourceNames.contains(entry.getKey())).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + getToBeRemovedStaleDataSources(resourceMetaData, toToBeRemovedStorageResource)); } /** - * Create switching resource by alter data source props. + * Create switching resource by alter data source pool properties. 
* * @param resourceMetaData resource meta data - * @param toBeChangedDataSourceProps to be changed data source properties map + * @param toBeChangedPropsMap to be changed data source pool properties map * @return created switching resource */ - public SwitchingResource createByAlterDataSourceProps(final ResourceMetaData resourceMetaData, final Map toBeChangedDataSourceProps) { - resourceMetaData.getStorageUnitMetaData().getDataSourcePropsMap().keySet().removeIf(each -> !toBeChangedDataSourceProps.containsKey(each)); - resourceMetaData.getStorageUnitMetaData().getDataSourcePropsMap().putAll(toBeChangedDataSourceProps); - StorageResourceWithProperties toBeChangedStorageResource = DataSourcePoolCreator.createStorageResourceWithoutDataSource(toBeChangedDataSourceProps); + public SwitchingResource createByAlterDataSourcePoolProperties(final ResourceMetaData resourceMetaData, final Map toBeChangedPropsMap) { + resourceMetaData.getStorageUnitMetaData().getDataSourcePoolPropertiesMap().keySet().removeIf(each -> !toBeChangedPropsMap.containsKey(each)); + resourceMetaData.getStorageUnitMetaData().getDataSourcePoolPropertiesMap().putAll(toBeChangedPropsMap); + StorageResourceWithProperties toBeChangedStorageResource = DataSourcePoolCreator.createStorageResourceWithoutDataSource(toBeChangedPropsMap); StorageResource staleStorageResource = getStaleDataSources(resourceMetaData, toBeChangedStorageResource); staleStorageResource.getStorageNodeDataSources() .putAll(getToBeDeletedDataSources(resourceMetaData.getStorageNodeDataSources(), toBeChangedStorageResource.getStorageNodeDataSources().keySet())); staleStorageResource.getStorageUnitNodeMappers().putAll( getToBeDeletedStorageUnitNodeMappers(resourceMetaData.getStorageUnitMetaData().getUnitNodeMappers(), toBeChangedStorageResource.getStorageUnitNodeMappers().keySet())); - return new SwitchingResource(resourceMetaData, createNewStorageResource(resourceMetaData, toBeChangedStorageResource), staleStorageResource, 
toBeChangedDataSourceProps); + return new SwitchingResource(resourceMetaData, createNewStorageResource(resourceMetaData, toBeChangedStorageResource), staleStorageResource); } private StorageResource createNewStorageResource(final ResourceMetaData resourceMetaData, final StorageResourceWithProperties toBeChangedStorageResource) { Map storageNodes = - getNewStorageNodes(resourceMetaData, toBeChangedStorageResource.getStorageNodeDataSources(), toBeChangedStorageResource.getDataSourcePropertiesMap()); + getNewStorageNodes(resourceMetaData, toBeChangedStorageResource.getStorageNodeDataSources(), toBeChangedStorageResource.getDataSourcePoolPropertiesMap()); Map storageUnitNodeMappers = getNewStorageUnitNodeMappers(resourceMetaData, toBeChangedStorageResource.getStorageUnitNodeMappers()); return new StorageResource(storageNodes, storageUnitNodeMappers); } - private Map getNewStorageNodes(final ResourceMetaData resourceMetaData, final Map toBeChangedStorageNodes, - final Map dataSourcePropertiesMap) { + private Map getNewStorageNodes(final ResourceMetaData resourceMetaData, + final Map toBeChangedStorageNodes, final Map propsMap) { Map result = new LinkedHashMap<>(resourceMetaData.getStorageNodeDataSources()); result.keySet().removeAll(getToBeDeletedDataSources(resourceMetaData.getStorageNodeDataSources(), toBeChangedStorageNodes.keySet()).keySet()); - result.putAll(getChangedDataSources(resourceMetaData.getStorageNodeDataSources(), toBeChangedStorageNodes, dataSourcePropertiesMap)); - result.putAll(getToBeAddedDataSources(resourceMetaData.getStorageNodeDataSources(), toBeChangedStorageNodes, dataSourcePropertiesMap)); + result.putAll(getChangedDataSources(resourceMetaData.getStorageNodeDataSources(), toBeChangedStorageNodes, propsMap)); + result.putAll(getToBeAddedDataSources(resourceMetaData.getStorageNodeDataSources(), toBeChangedStorageNodes, propsMap)); return result; } @@ -115,27 +109,27 @@ private Map getNewStorageUnitNodeMappers(final Re return result; } - private 
Map getChangedDataSources(final Map storageNodes, final Map toBeChangedStorageNodes, - final Map dataSourcePropertiesMap) { + private Map getChangedDataSources(final Map storageNodes, + final Map toBeChangedStorageNodes, final Map propsMap) { Collection toBeChangedDataSourceNames = toBeChangedStorageNodes.keySet().stream() - .filter(each -> isModifiedDataSource(storageNodes, each, dataSourcePropertiesMap.get(each.getName()))).collect(Collectors.toList()); + .filter(each -> isModifiedDataSource(storageNodes, each, propsMap.get(each.getName()))).collect(Collectors.toList()); Map result = new LinkedHashMap<>(toBeChangedStorageNodes.size(), 1F); for (StorageNode each : toBeChangedDataSourceNames) { - result.put(each, DataSourcePoolCreator.create(dataSourcePropertiesMap.get(each.getName()))); + result.put(each, DataSourcePoolCreator.create(propsMap.get(each.getName()))); } return result; } - private boolean isModifiedDataSource(final Map originalDataSources, final StorageNode storageNode, final DataSourceProperties dataSourceProps) { - return originalDataSources.containsKey(storageNode) && !dataSourceProps.equals(DataSourcePropertiesCreator.create(originalDataSources.get(storageNode))); + private boolean isModifiedDataSource(final Map originalDataSources, final StorageNode storageNode, final DataSourcePoolProperties propsMap) { + return originalDataSources.containsKey(storageNode) && !propsMap.equals(DataSourcePoolPropertiesCreator.create(originalDataSources.get(storageNode))); } private Map getToBeAddedDataSources(final Map storageNodes, final Map toBeChangedStorageNodes, - final Map dataSourcePropertiesMap) { + final Map propsMap) { Collection toBeAddedDataSourceNames = toBeChangedStorageNodes.keySet().stream().filter(each -> !storageNodes.containsKey(each)).collect(Collectors.toList()); Map result = new LinkedHashMap<>(); for (StorageNode each : toBeAddedDataSourceNames) { - result.put(each, DataSourcePoolCreator.create(dataSourcePropertiesMap.get(each.getName()))); 
+ result.put(each, DataSourcePoolCreator.create(propsMap.get(each.getName()))); } return result; } @@ -155,14 +149,14 @@ private StorageResource getToBeRemovedStaleDataSources(final ResourceMetaData re private StorageResource getStaleDataSources(final ResourceMetaData resourceMetaData, final StorageResourceWithProperties toBeChangedStorageResource) { Map storageNodes = new LinkedHashMap<>(resourceMetaData.getStorageNodeDataSources().size(), 1F); Map storageUnitNodeMappers = new LinkedHashMap<>(resourceMetaData.getStorageUnitMetaData().getUnitNodeMappers().size(), 1F); - storageNodes.putAll(getToBeChangedDataSources(resourceMetaData.getStorageNodeDataSources(), toBeChangedStorageResource.getDataSourcePropertiesMap())); + storageNodes.putAll(getToBeChangedDataSources(resourceMetaData.getStorageNodeDataSources(), toBeChangedStorageResource.getDataSourcePoolPropertiesMap())); storageUnitNodeMappers.putAll(getChangedStorageUnitNodeMappers(resourceMetaData.getStorageUnitMetaData().getUnitNodeMappers(), toBeChangedStorageResource.getStorageUnitNodeMappers())); return new StorageResource(storageNodes, storageUnitNodeMappers); } - private Map getToBeChangedDataSources(final Map storageNodes, final Map dataSourcePropertiesMap) { + private Map getToBeChangedDataSources(final Map storageNodes, final Map propsMap) { Map result = new LinkedHashMap<>(storageNodes.size(), 1F); - for (Entry entry : dataSourcePropertiesMap.entrySet()) { + for (Entry entry : propsMap.entrySet()) { StorageNode storageNode = new StorageNode(entry.getKey()); if (isModifiedDataSource(storageNodes, storageNode, entry.getValue())) { result.put(storageNode, storageNodes.get(storageNode)); @@ -205,7 +199,6 @@ private boolean isModifiedStorageUnitNodeMapper(final Map getToBeAddedStorageUnitNodeMappers(final Map storageUnitNodeMappers, final Map toBeChangedStorageUnitNodeMappers) { - return toBeChangedStorageUnitNodeMappers.entrySet().stream() - .filter(entry -> 
!storageUnitNodeMappers.containsKey(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); + return toBeChangedStorageUnitNodeMappers.entrySet().stream().filter(entry -> !storageUnitNodeMappers.containsKey(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); } } diff --git a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/SwitchingResource.java b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/SwitchingResource.java index 6be8230a3149f..e6ab235c4fdfe 100644 --- a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/SwitchingResource.java +++ b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/SwitchingResource.java @@ -19,11 +19,9 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; import org.apache.shardingsphere.infra.datasource.storage.StorageResource; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; -import java.util.Map; import java.util.Objects; /** @@ -40,9 +38,6 @@ public final class SwitchingResource { @Getter private final StorageResource staleStorageResource; - @Getter - private final Map dataSourcePropsMap; - /** * Close stale data sources. 
*/ diff --git a/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/ContextManagerTest.java b/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/ContextManagerTest.java index af5dca51d1f00..183a1df9aeebd 100644 --- a/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/ContextManagerTest.java +++ b/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/ContextManagerTest.java @@ -23,7 +23,7 @@ import org.apache.shardingsphere.infra.database.core.DefaultDatabase; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.datanode.DataNode; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.datasource.storage.StorageNode; import org.apache.shardingsphere.infra.datasource.storage.StorageResourceUtils; import org.apache.shardingsphere.infra.instance.InstanceContext; @@ -233,7 +233,7 @@ void assertAlterDataSourceConfiguration() { when(metaDataContexts.getMetaData().getDatabase("foo_db")).thenReturn(originalDatabaseMetaData); when(metaDataContexts.getMetaData().getGlobalRuleMetaData()).thenReturn(new RuleMetaData(Collections.emptyList())); contextManager.getConfigurationContextManager().alterDataSourceUnitsConfiguration("foo_db", - Collections.singletonMap("foo_ds", new DataSourceProperties(MockedDataSource.class.getName(), createProperties("test", "test")))); + Collections.singletonMap("foo_ds", new DataSourcePoolProperties(MockedDataSource.class.getName(), createProperties("test", "test")))); assertThat(contextManager.getMetaDataContexts().getMetaData().getDatabase("foo_db").getResourceMetaData().getDataSources().size(), is(3)); assertAlteredDataSource((MockedDataSource) contextManager.getMetaDataContexts().getMetaData().getDatabase("foo_db") .getResourceMetaData().getStorageNodeDataSources().get(new 
StorageNode("foo_ds"))); diff --git a/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/switcher/ResourceSwitchManagerTest.java b/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/switcher/ResourceSwitchManagerTest.java index 30431edb77492..464a8927902e1 100644 --- a/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/switcher/ResourceSwitchManagerTest.java +++ b/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/switcher/ResourceSwitchManagerTest.java @@ -17,7 +17,7 @@ package org.apache.shardingsphere.mode.manager.switcher; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.datasource.storage.StorageNode; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; @@ -41,18 +41,18 @@ class ResourceSwitchManagerTest { @Test void assertCreate() { Map dataSourceMap = createDataSourceMap(); - SwitchingResource actual = new ResourceSwitchManager().create(new ResourceMetaData("sharding_db", dataSourceMap), createToBeChangedDataSourcePropsMap()); + SwitchingResource actual = new ResourceSwitchManager().create(new ResourceMetaData("sharding_db", dataSourceMap), createToBeChangedDataSourcePoolPropertiesMap()); assertNewDataSources(actual); actual.closeStaleDataSources(); assertStaleDataSources(dataSourceMap); } @Test - void assertCreateByAlterDataSourceProps() { + void assertCreateByAlterDataSourcePoolProperties() { Map dataSourceMap = new HashMap<>(3, 1F); dataSourceMap.put("ds_0", new MockedDataSource()); dataSourceMap.put("ds_1", new MockedDataSource()); - SwitchingResource actual = new ResourceSwitchManager().createByAlterDataSourceProps(new ResourceMetaData("sharding_db", dataSourceMap), Collections.emptyMap()); + SwitchingResource actual = new 
ResourceSwitchManager().createByAlterDataSourcePoolProperties(new ResourceMetaData("sharding_db", dataSourceMap), Collections.emptyMap()); assertTrue(actual.getNewStorageResource().getStorageNodeDataSources().isEmpty()); assertThat(actual.getStaleStorageResource().getStorageNodeDataSources().size(), is(2)); actual.closeStaleDataSources(); @@ -67,17 +67,17 @@ private Map createDataSourceMap() { return result; } - private Map createToBeChangedDataSourcePropsMap() { - Map result = new HashMap<>(3, 1F); - result.put("new", new DataSourceProperties(MockedDataSource.class.getName(), getDataSourceProps(2))); - result.put("not_change", new DataSourceProperties(MockedDataSource.class.getName(), getDataSourceProps(2))); - Map replaceProps = getDataSourceProps(3); + private Map createToBeChangedDataSourcePoolPropertiesMap() { + Map result = new HashMap<>(3, 1F); + result.put("new", new DataSourcePoolProperties(MockedDataSource.class.getName(), getDataSourcePoolProperties(2))); + result.put("not_change", new DataSourcePoolProperties(MockedDataSource.class.getName(), getDataSourcePoolProperties(2))); + Map replaceProps = getDataSourcePoolProperties(3); replaceProps.put("password", "new_pwd"); - result.put("replace", new DataSourceProperties(MockedDataSource.class.getName(), replaceProps)); + result.put("replace", new DataSourcePoolProperties(MockedDataSource.class.getName(), replaceProps)); return result; } - private Map getDataSourceProps(final int initialCapacity) { + private Map getDataSourcePoolProperties(final int initialCapacity) { Map result = new LinkedHashMap<>(initialCapacity, 1F); result.put("url", new MockedDataSource().getUrl()); result.put("username", "root"); diff --git a/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/switcher/SwitchingResourceTest.java b/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/switcher/SwitchingResourceTest.java index bfd163b17fb05..e72bea2174219 100644 --- 
a/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/switcher/SwitchingResourceTest.java +++ b/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/switcher/SwitchingResourceTest.java @@ -36,7 +36,7 @@ void assertCloseStaleDataSources() { ResourceMetaData resourceMetaData = mock(ResourceMetaData.class); StorageResource newStorageResource = new StorageResource(Collections.singletonMap(new StorageNode("new_ds"), new MockedDataSource()), Collections.emptyMap()); StorageResource staleStorageResource = new StorageResource(Collections.singletonMap(new StorageNode("stale_ds"), staleDataSource), Collections.emptyMap()); - new SwitchingResource(resourceMetaData, newStorageResource, staleStorageResource, Collections.emptyMap()).closeStaleDataSources(); + new SwitchingResource(resourceMetaData, newStorageResource, staleStorageResource).closeStaleDataSources(); verify(resourceMetaData).close(staleDataSource); } } diff --git a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/ClusterModeContextManager.java b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/ClusterModeContextManager.java index 4b87aa551a661..9ee625f59d890 100644 --- a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/ClusterModeContextManager.java +++ b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/ClusterModeContextManager.java @@ -18,7 +18,7 @@ package org.apache.shardingsphere.mode.manager.cluster; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; import 
org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable; @@ -88,23 +88,23 @@ public void alterSchemaMetaData(final AlterSchemaMetaDataPOJO alterSchemaMetaDat } @Override - public void registerStorageUnits(final String databaseName, final Map toBeRegisterStorageUnitProps) { - contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append(databaseName, toBeRegisterStorageUnitProps); + public void registerStorageUnits(final String databaseName, final Map toBeRegisteredProps) { + contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append(databaseName, toBeRegisteredProps); } @Override - public void alterStorageUnits(final String databaseName, final Map toBeUpdatedStorageUnitProps) { - contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append(databaseName, toBeUpdatedStorageUnitProps); + public void alterStorageUnits(final String databaseName, final Map toBeUpdatedProps) { + contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append(databaseName, toBeUpdatedProps); } @Override public void unregisterStorageUnits(final String databaseName, final Collection toBeDroppedStorageUnitNames) { contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().persist(databaseName, - getToBeReversedDataSourcePropsMap(contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().load(databaseName), toBeDroppedStorageUnitNames)); + getToBeReversedDataSourcePoolPropertiesMap(contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().load(databaseName), toBeDroppedStorageUnitNames)); } - private Map getToBeReversedDataSourcePropsMap(final Map dataSourcePropsMap, final Collection toBeDroppedResourceNames) { - return dataSourcePropsMap.entrySet().stream().filter(entry -> !toBeDroppedResourceNames.contains(entry.getKey())).collect(Collectors.toMap(Entry::getKey, 
Entry::getValue)); + private Map getToBeReversedDataSourcePoolPropertiesMap(final Map propsMap, final Collection toBeDroppedResourceNames) { + return propsMap.entrySet().stream().filter(entry -> !toBeDroppedResourceNames.contains(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); } @Override diff --git a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/NewClusterModeContextManager.java b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/NewClusterModeContextManager.java index 6f9f42bb5e85c..b88e89f024953 100644 --- a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/NewClusterModeContextManager.java +++ b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/NewClusterModeContextManager.java @@ -18,7 +18,7 @@ package org.apache.shardingsphere.mode.manager.cluster; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable; @@ -98,27 +98,27 @@ public void alterSchemaMetaData(final AlterSchemaMetaDataPOJO alterSchemaMetaDat } @Override - public void registerStorageUnits(final String databaseName, final Map toBeRegisterStorageUnitProps) { - contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().persistConfig(databaseName, toBeRegisterStorageUnitProps); + public void registerStorageUnits(final String databaseName, final Map toBeRegisteredProps) { + 
contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().persistConfig(databaseName, toBeRegisteredProps); } @Override - public void alterStorageUnits(final String databaseName, final Map toBeUpdatedStorageUnitProps) { - DatabaseBasedPersistService> dataSourceService = contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService(); - contextManager.getMetaDataContexts().getPersistService().getMetaDataVersionPersistService().switchActiveVersion(dataSourceService.persistConfig(databaseName, toBeUpdatedStorageUnitProps)); + public void alterStorageUnits(final String databaseName, final Map toBeUpdatedProps) { + DatabaseBasedPersistService> dataSourceService = contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService(); + contextManager.getMetaDataContexts().getPersistService().getMetaDataVersionPersistService().switchActiveVersion(dataSourceService.persistConfig(databaseName, toBeUpdatedProps)); } @Override public void unregisterStorageUnits(final String databaseName, final Collection toBeDroppedStorageUnitNames) { contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().delete(databaseName, - getToBeDroppedDataSourcePropsMap(contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().load(databaseName), toBeDroppedStorageUnitNames)); + getToBeDroppedDataSourcePoolProperties(contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().load(databaseName), toBeDroppedStorageUnitNames)); } - private Map getToBeDroppedDataSourcePropsMap(final Map dataSourcePropsMap, final Collection toBeDroppedResourceNames) { - Map result = new LinkedHashMap<>(); + private Map getToBeDroppedDataSourcePoolProperties(final Map propsMap, final Collection toBeDroppedResourceNames) { + Map result = new LinkedHashMap<>(); for (String each : toBeDroppedResourceNames) { - if (dataSourcePropsMap.containsKey(each)) { - result.put(each, 
dataSourcePropsMap.get(each)); + if (propsMap.containsKey(each)) { + result.put(each, propsMap.get(each)); } } return result; diff --git a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/config/event/datasource/DataSourceNodesChangedEvent.java b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/config/event/datasource/DataSourceNodesChangedEvent.java index b53935643821e..b38fcfb2554c6 100644 --- a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/config/event/datasource/DataSourceNodesChangedEvent.java +++ b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/config/event/datasource/DataSourceNodesChangedEvent.java @@ -19,7 +19,7 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.rule.event.GovernanceEvent; import java.util.Map; @@ -35,5 +35,5 @@ public final class DataSourceNodesChangedEvent implements GovernanceEvent { private final String databaseVersion; - private final Map dataSourcePropertiesMap; + private final Map dataSourcePoolPropertiesMap; } diff --git a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/config/event/datasource/DataSourceUnitsChangedEvent.java b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/config/event/datasource/DataSourceUnitsChangedEvent.java index e17403db7bc19..ae1a8f81a2f04 100644 --- a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/config/event/datasource/DataSourceUnitsChangedEvent.java +++ 
b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/config/event/datasource/DataSourceUnitsChangedEvent.java @@ -19,8 +19,8 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.rule.event.GovernanceEvent; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; import java.util.Map; @@ -35,5 +35,5 @@ public final class DataSourceUnitsChangedEvent implements GovernanceEvent { private final String databaseVersion; - private final Map dataSourcePropertiesMap; + private final Map dataSourcePoolPropertiesMap; } diff --git a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/metadata/watcher/MetaDataChangedWatcher.java b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/metadata/watcher/MetaDataChangedWatcher.java index 13dbf8df45f68..c3cec993a06eb 100644 --- a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/metadata/watcher/MetaDataChangedWatcher.java +++ b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/metadata/watcher/MetaDataChangedWatcher.java @@ -20,7 +20,7 @@ import com.google.common.base.Preconditions; import com.google.common.base.Strings; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.metadata.database.schema.builder.SystemSchemaBuilderRule; import org.apache.shardingsphere.infra.yaml.config.pojo.rule.YamlRuleConfiguration; import 
org.apache.shardingsphere.infra.yaml.config.swapper.resource.YamlDataSourceConfigurationSwapper; @@ -160,21 +160,21 @@ private Optional createRuleAndDataSourceChangedEvent(final Data @SuppressWarnings("unchecked") private DataSourceUnitsChangedEvent createDataSourceUnitsChangedEvent(final String databaseName, final String databaseVersion, final DataChangedEvent event) { Map> yamlDataSources = YamlEngine.unmarshal(event.getValue(), Map.class); - Map dataSourcePropertiesMap = yamlDataSources.isEmpty() + Map propsMap = yamlDataSources.isEmpty() ? new HashMap<>() : yamlDataSources.entrySet().stream().collect(Collectors.toMap( - Entry::getKey, entry -> new YamlDataSourceConfigurationSwapper().swapToDataSourceProperties(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); - return new DataSourceUnitsChangedEvent(databaseName, databaseVersion, dataSourcePropertiesMap); + Entry::getKey, entry -> new YamlDataSourceConfigurationSwapper().swapToDataSourcePoolProperties(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); + return new DataSourceUnitsChangedEvent(databaseName, databaseVersion, propsMap); } @SuppressWarnings("unchecked") private DataSourceNodesChangedEvent createDataSourceNodesChangedEvent(final String databaseName, final String databaseVersion, final DataChangedEvent event) { Map> yamlDataSources = YamlEngine.unmarshal(event.getValue(), Map.class); - Map dataSourcePropertiesMap = yamlDataSources.isEmpty() + Map propsMap = yamlDataSources.isEmpty() ? 
new HashMap<>() : yamlDataSources.entrySet().stream().collect(Collectors.toMap( - Entry::getKey, entry -> new YamlDataSourceConfigurationSwapper().swapToDataSourceProperties(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); - return new DataSourceNodesChangedEvent(databaseName, databaseVersion, dataSourcePropertiesMap); + Entry::getKey, entry -> new YamlDataSourceConfigurationSwapper().swapToDataSourcePoolProperties(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); + return new DataSourceNodesChangedEvent(databaseName, databaseVersion, propsMap); } @SuppressWarnings("unchecked") diff --git a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ConfigurationChangedSubscriber.java b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ConfigurationChangedSubscriber.java index a2ec8a703d563..c10adcd1f19f3 100644 --- a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ConfigurationChangedSubscriber.java +++ b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ConfigurationChangedSubscriber.java @@ -60,7 +60,7 @@ public ConfigurationChangedSubscriber(final RegistryCenter registryCenter, final */ @Subscribe public synchronized void renew(final DataSourceUnitsChangedEvent event) { - contextManager.getConfigurationContextManager().alterDataSourceUnitsConfiguration(event.getDatabaseName(), event.getDataSourcePropertiesMap()); + contextManager.getConfigurationContextManager().alterDataSourceUnitsConfiguration(event.getDatabaseName(), event.getDataSourcePoolPropertiesMap()); disableDataSources(); } diff --git a/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/ClusterModeContextManagerTest.java 
b/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/ClusterModeContextManagerTest.java index 4b5d2a135fb0d..87e295ea854a4 100644 --- a/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/ClusterModeContextManagerTest.java +++ b/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/ClusterModeContextManagerTest.java @@ -21,7 +21,7 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.instance.ComputeNodeInstance; import org.apache.shardingsphere.infra.instance.InstanceContext; import org.apache.shardingsphere.infra.instance.metadata.jdbc.JDBCInstanceMetaData; @@ -224,7 +224,7 @@ void assertAlterStorageUnitsWithProcessListClusterPerRepoFix() { } @Test - void assertAlterStorageUnitsWithDataSourceProperties() { + void assertAlterStorageUnitsWithDataSourcePoolProperties() { ClusterModeContextManager clusterModeContextManager = new ClusterModeContextManager(); MetaDataPersistService persistService = new MetaDataPersistService(new ClusterPersistRepositoryFixture()); MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, new ShardingSphereMetaData()); @@ -233,12 +233,11 @@ void assertAlterStorageUnitsWithDataSourceProperties() { ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, 
workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); - Map stringDataSourcePropertiesMap = new HashMap<>(); - stringDataSourcePropertiesMap.put("active_version", new DataSourceProperties("active_version", new HashMap<>())); - assertDoesNotThrow(() -> clusterModeContextManager.alterStorageUnits("db", stringDataSourcePropertiesMap)); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); + Map propsMap = new HashMap<>(); + propsMap.put("active_version", new DataSourcePoolProperties("active_version", new HashMap<>())); + assertDoesNotThrow(() -> clusterModeContextManager.alterStorageUnits("db", propsMap)); } @Test @@ -251,70 +250,60 @@ void assertAlterStorageUnitsInvalidName() { ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); - Map stringDataSourcePropertiesMap = new HashMap<>(); - stringDataSourcePropertiesMap.put("\n", new DataSourceProperties("\n", new HashMap<>())); - stringDataSourcePropertiesMap.put("active_version", new DataSourceProperties("active_version", new HashMap<>())); - assertDoesNotThrow(() -> clusterModeContextManager.alterStorageUnits("db", stringDataSourcePropertiesMap)); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); + Map propsMap = new HashMap<>(); + propsMap.put("\n", new DataSourcePoolProperties("\n", new HashMap<>())); + 
propsMap.put("active_version", new DataSourcePoolProperties("active_version", new HashMap<>())); + assertDoesNotThrow(() -> clusterModeContextManager.alterStorageUnits("db", propsMap)); } @Test - void assertAlterStorageUnitsWithoutDataSourceProperties() { + void assertAlterStorageUnitsWithoutDataSourcePoolProperties() { ClusterModeContextManager clusterModeContextManager = new ClusterModeContextManager(); - MetaDataPersistService persistService = new MetaDataPersistService( - new ProcessListClusterPersistRepositoryFixture()); + MetaDataPersistService persistService = new MetaDataPersistService(new ProcessListClusterPersistRepositoryFixture()); Map databases = new HashMap<>(); RuleMetaData globalRuleMetaData = new RuleMetaData(new LinkedList<>()); - MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, - new ShardingSphereMetaData(databases, mock(ResourceMetaData.class), globalRuleMetaData, new ConfigurationProperties(new Properties()))); + MetaDataContexts metaDataContexts = new MetaDataContexts( + persistService, new ShardingSphereMetaData(databases, mock(ResourceMetaData.class), globalRuleMetaData, new ConfigurationProperties(new Properties()))); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); + 
clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); assertDoesNotThrow(() -> clusterModeContextManager.alterStorageUnits("db", new TreeMap<>())); } @Test - void assertAlterStorageUnitsWithEmptyDataSourcePropertiesMap() { + void assertAlterStorageUnitsWithEmptyDataSourcePoolPropertiesMap() { ClusterModeContextManager clusterModeContextManager = new ClusterModeContextManager(); - MetaDataPersistService persistService = new MetaDataPersistService( - new ProcessListClusterPersistRepositoryFixture()); + MetaDataPersistService persistService = new MetaDataPersistService(new ProcessListClusterPersistRepositoryFixture()); MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, new ShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); - Map stringDataSourcePropertiesMap = new HashMap<>(new TreeMap<>()); - assertDoesNotThrow(() -> clusterModeContextManager.alterStorageUnits("db", stringDataSourcePropertiesMap)); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, 
null, new EventBusContext()))); + Map propsMap = new HashMap<>(new TreeMap<>()); + assertDoesNotThrow(() -> clusterModeContextManager.alterStorageUnits("db", propsMap)); } @Test - void assertAlterStorageUnitsWithOneDataSourceProperties() { + void assertAlterStorageUnitsWithOneDataSourcePoolProperties() { ClusterModeContextManager clusterModeContextManager = new ClusterModeContextManager(); - MetaDataPersistService persistService = new MetaDataPersistService( - new ProcessListClusterPersistRepositoryFixture()); + MetaDataPersistService persistService = new MetaDataPersistService(new ProcessListClusterPersistRepositoryFixture()); MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, new ShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); - Map stringDataSourcePropertiesMap = new HashMap<>(); - stringDataSourcePropertiesMap.put("42", new DataSourceProperties("active_version", new HashMap<>())); - assertDoesNotThrow(() -> clusterModeContextManager.alterStorageUnits("db", stringDataSourcePropertiesMap)); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); + Map propsMap = 
new HashMap<>(); + propsMap.put("42", new DataSourcePoolProperties("active_version", new HashMap<>())); + assertDoesNotThrow(() -> clusterModeContextManager.alterStorageUnits("db", propsMap)); } @Test @@ -324,29 +313,24 @@ void assertUnregisterStorageUnits() { MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, createShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); assertDoesNotThrow(() -> clusterModeContextManager.unregisterStorageUnits("db", new LinkedList<>())); } @Test void assertUnregisterStorageUnitsWithProcessListClusterPersistRepoFixture() { ClusterModeContextManager clusterModeContextManager = new ClusterModeContextManager(); - MetaDataPersistService persistService = new MetaDataPersistService( - new ProcessListClusterPersistRepositoryFixture()); + MetaDataPersistService persistService = new MetaDataPersistService(new ProcessListClusterPersistRepositoryFixture()); MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, createShardingSphereMetaData()); ComputeNodeInstance instance = new 
ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); assertDoesNotThrow(() -> clusterModeContextManager.unregisterStorageUnits("db", new LinkedList<>())); } @@ -357,11 +341,10 @@ void assertUnregisterStorageUnitsWithClusterPersistRepoFixture() { MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, createShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfiguration = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); clusterModeContextManager.setContextManagerAware( - new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfiguration, modeContextManager, null, new 
EventBusContext()))); + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); assertDoesNotThrow(() -> clusterModeContextManager.alterRuleConfiguration("db", new LinkedList<>())); } @@ -372,15 +355,13 @@ void assertAlterRuleConfiguration() { MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, createShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); - Collection ruleConfigurationList = new LinkedList<>(); - ruleConfigurationList.add(new RuleConfigurationFixture()); - assertDoesNotThrow(() -> clusterModeContextManager.alterRuleConfiguration("db", ruleConfigurationList)); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); + Collection ruleConfigs = new LinkedList<>(); + ruleConfigs.add(new RuleConfigurationFixture()); + assertDoesNotThrow(() -> clusterModeContextManager.alterRuleConfiguration("db", ruleConfigs)); } private ShardingSphereMetaData createShardingSphereMetaData() { @@ -396,12 +377,10 @@ void assertAlterRuleConfigurationMultiple() { MetaDataContexts 
metaDataContexts = new MetaDataContexts(persistService, createShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); Collection ruleConfigurationList = new LinkedList<>(); ruleConfigurationList.add(new RuleConfigurationFixture()); ruleConfigurationList.add(new RuleConfigurationFixture()); @@ -411,20 +390,17 @@ void assertAlterRuleConfigurationMultiple() { @Test void assertAlterRuleConfigurationWithPersistService() { ClusterModeContextManager clusterModeContextManager = new ClusterModeContextManager(); - MetaDataPersistService persistService = new MetaDataPersistService( - new ProcessListClusterPersistRepositoryFixture()); + MetaDataPersistService persistService = new MetaDataPersistService(new ProcessListClusterPersistRepositoryFixture()); MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, createShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new 
ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); - Collection ruleConfigurationList = new LinkedList<>(); - ruleConfigurationList.add(new RuleConfigurationFixture()); - assertDoesNotThrow(() -> clusterModeContextManager.alterRuleConfiguration("db", ruleConfigurationList)); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); + Collection ruleConfigs = new LinkedList<>(); + ruleConfigs.add(new RuleConfigurationFixture()); + assertDoesNotThrow(() -> clusterModeContextManager.alterRuleConfiguration("db", ruleConfigs)); } @Test @@ -434,12 +410,10 @@ void assertAlterGlobalRuleConfigurationWithEmptyRuleConfigurations() { MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, createShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - 
.setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); assertDoesNotThrow(() -> clusterModeContextManager.alterGlobalRuleConfiguration(new LinkedList<>())); } @@ -450,15 +424,13 @@ void assertAlterGlobalRuleConfigurationWithSingleRuleConfigurations() { MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, new ShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); - Collection ruleConfigurationList = new LinkedList<>(); - ruleConfigurationList.add(new RuleConfigurationFixture()); - assertDoesNotThrow(() -> clusterModeContextManager.alterGlobalRuleConfiguration(ruleConfigurationList)); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); + Collection ruleConfigs = new LinkedList<>(); + ruleConfigs.add(new RuleConfigurationFixture()); + 
assertDoesNotThrow(() -> clusterModeContextManager.alterGlobalRuleConfiguration(ruleConfigs)); } @Test @@ -468,16 +440,14 @@ void assertAlterGlobalRuleConfigurationWithMultipleRuleConfigurations() { MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, new ShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); - Collection ruleConfigurationList = new LinkedList<>(); - ruleConfigurationList.add(new RuleConfigurationFixture()); - ruleConfigurationList.add(new RuleConfigurationFixture()); - assertDoesNotThrow(() -> clusterModeContextManager.alterGlobalRuleConfiguration(ruleConfigurationList)); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); + Collection ruleConfigs = new LinkedList<>(); + ruleConfigs.add(new RuleConfigurationFixture()); + ruleConfigs.add(new RuleConfigurationFixture()); + assertDoesNotThrow(() -> clusterModeContextManager.alterGlobalRuleConfiguration(ruleConfigs)); } @Test @@ -487,12 +457,10 @@ void assertAlterProperties() { MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, new ShardingSphereMetaData()); ComputeNodeInstance 
instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); assertDoesNotThrow(() -> clusterModeContextManager.alterProperties(new Properties())); } @@ -502,8 +470,7 @@ void assertConstructor() { try (MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, new ShardingSphereMetaData())) { ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); assertDoesNotThrow(() -> new ClusterModeContextManager().setContextManagerAware( new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext())))); diff 
--git a/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ConfigurationChangedSubscriberTest.java b/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ConfigurationChangedSubscriberTest.java index 300ea1f75d888..a140ff0eea0da 100644 --- a/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ConfigurationChangedSubscriberTest.java +++ b/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ConfigurationChangedSubscriberTest.java @@ -25,8 +25,8 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationPropertyKey; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.instance.metadata.InstanceMetaData; import org.apache.shardingsphere.infra.instance.metadata.proxy.ProxyInstanceMetaData; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; @@ -134,16 +134,16 @@ void assertRenewForRuleConfigurationsChanged() { @Test void assertRenewForDataSourceChanged() { - subscriber.renew(new DataSourceUnitsChangedEvent("db", "0", createChangedDataSourcePropertiesMap())); + subscriber.renew(new DataSourceUnitsChangedEvent("db", "0", createChangedDataSourcePoolPropertiesMap())); assertTrue(contextManager.getMetaDataContexts().getMetaData().getDatabase("db").getResourceMetaData().getDataSources().containsKey("ds_2")); } - private Map 
createChangedDataSourcePropertiesMap() { + private Map createChangedDataSourcePoolPropertiesMap() { MockedDataSource dataSource = new MockedDataSource(); - Map result = new LinkedHashMap<>(3, 1F); - result.put("primary_ds", DataSourcePropertiesCreator.create(dataSource)); - result.put("ds_1", DataSourcePropertiesCreator.create(dataSource)); - result.put("ds_2", DataSourcePropertiesCreator.create(dataSource)); + Map result = new LinkedHashMap<>(3, 1F); + result.put("primary_ds", DataSourcePoolPropertiesCreator.create(dataSource)); + result.put("ds_1", DataSourcePoolPropertiesCreator.create(dataSource)); + result.put("ds_2", DataSourcePoolPropertiesCreator.create(dataSource)); return result; } diff --git a/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ResourceMetaDataChangedSubscriberTest.java b/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ResourceMetaDataChangedSubscriberTest.java index 58196126b1ec9..eaf1262978c2a 100644 --- a/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ResourceMetaDataChangedSubscriberTest.java +++ b/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ResourceMetaDataChangedSubscriberTest.java @@ -20,8 +20,8 @@ import org.apache.shardingsphere.infra.config.mode.ModeConfiguration; import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesCreator; import 
org.apache.shardingsphere.infra.instance.metadata.InstanceMetaData; import org.apache.shardingsphere.infra.instance.metadata.proxy.ProxyInstanceMetaData; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; @@ -113,18 +113,18 @@ private Map createDatabases() { @Test void assertRenewForDatabaseAdded() { - when(persistService.getDataSourceUnitService().load("db_added")).thenReturn(createDataSourcePropertiesMap()); + when(persistService.getDataSourceUnitService().load("db_added")).thenReturn(createDataSourcePoolPropertiesMap()); when(persistService.getDatabaseRulePersistService().load("db_added")).thenReturn(Collections.emptyList()); subscriber.renew(new DatabaseAddedEvent("db_added")); assertNotNull(contextManager.getMetaDataContexts().getMetaData().getDatabase("db_added").getResourceMetaData().getDataSources()); } - private Map createDataSourcePropertiesMap() { + private Map createDataSourcePoolPropertiesMap() { MockedDataSource dataSource = new MockedDataSource(); - Map result = new LinkedHashMap<>(3, 1F); - result.put("primary_ds", DataSourcePropertiesCreator.create(dataSource)); - result.put("replica_ds_0", DataSourcePropertiesCreator.create(dataSource)); - result.put("replica_ds_1", DataSourcePropertiesCreator.create(dataSource)); + Map result = new LinkedHashMap<>(3, 1F); + result.put("primary_ds", DataSourcePoolPropertiesCreator.create(dataSource)); + result.put("replica_ds_0", DataSourcePoolPropertiesCreator.create(dataSource)); + result.put("replica_ds_1", DataSourcePoolPropertiesCreator.create(dataSource)); return result; } diff --git a/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/NewStandaloneModeContextManager.java b/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/NewStandaloneModeContextManager.java index edfe3280a8f96..7539ce9bd1d94 100644 --- 
a/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/NewStandaloneModeContextManager.java +++ b/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/NewStandaloneModeContextManager.java @@ -19,7 +19,7 @@ import com.google.common.base.Strings; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; @@ -207,40 +207,40 @@ private void refreshMetaDataHeldRule(final ShardingSphereDatabase database) { } @Override - public void registerStorageUnits(final String databaseName, final Map toBeRegisterStorageUnitProps) throws SQLException { + public void registerStorageUnits(final String databaseName, final Map toBeRegisteredProps) throws SQLException { SwitchingResource switchingResource = - new ResourceSwitchManager().create(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeRegisterStorageUnitProps); + new ResourceSwitchManager().create(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeRegisteredProps); contextManager.getMetaDataContexts().getMetaData().getDatabases().putAll(contextManager.getConfigurationContextManager().createChangedDatabases(databaseName, false, switchingResource, null)); contextManager.getMetaDataContexts().getMetaData().getGlobalRuleMetaData().findRules(ResourceHeldRule.class) .forEach(each -> each.addResource(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName))); 
contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getSchemas() .forEach((schemaName, schema) -> contextManager.getMetaDataContexts().getPersistService().getDatabaseMetaDataService() .persist(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), schemaName, schema)); - contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), - toBeRegisterStorageUnitProps); + contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append( + contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), toBeRegisteredProps); clearServiceCache(); } @Override - public void alterStorageUnits(final String databaseName, final Map toBeUpdatedStorageUnitProps) throws SQLException { + public void alterStorageUnits(final String databaseName, final Map toBeUpdatedProps) throws SQLException { SwitchingResource switchingResource = - new ResourceSwitchManager().create(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeUpdatedStorageUnitProps); + new ResourceSwitchManager().create(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeUpdatedProps); contextManager.getMetaDataContexts().getMetaData().getDatabases().putAll(contextManager.getConfigurationContextManager().createChangedDatabases(databaseName, true, switchingResource, null)); contextManager.getMetaDataContexts().getMetaData().getGlobalRuleMetaData().findRules(ResourceHeldRule.class) .forEach(each -> each.addResource(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName))); - contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), - toBeUpdatedStorageUnitProps); + 
contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append( + contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), toBeUpdatedProps); switchingResource.closeStaleDataSources(); clearServiceCache(); } @Override public void unregisterStorageUnits(final String databaseName, final Collection toBeDroppedStorageUnitNames) throws SQLException { - Map dataSourcePropsMap = contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService() + Map propsMap = contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService() .load(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName()); - Map toBeDeletedDataSourcePropsMap = getToBeDeletedDataSourcePropsMap(dataSourcePropsMap, toBeDroppedStorageUnitNames); + Map toBeDeletedPropsMap = getToBeDeletedPropertiesMap(propsMap, toBeDroppedStorageUnitNames); SwitchingResource switchingResource = - new ResourceSwitchManager().createByDropResource(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeDeletedDataSourcePropsMap); + new ResourceSwitchManager().createByDropResource(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeDeletedPropsMap); contextManager.getMetaDataContexts().getMetaData().getDatabases() .putAll(contextManager.getConfigurationContextManager().renewDatabase(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName), switchingResource)); MetaDataContexts reloadMetaDataContexts = contextManager.getConfigurationContextManager().createMetaDataContexts(databaseName, false, switchingResource, null); @@ -248,19 +248,19 @@ public void unregisterStorageUnits(final String databaseName, final Collection toBeReversedDataSourcePropsMap = getToBeReversedDataSourcePropsMap(dataSourcePropsMap, toBeDroppedStorageUnitNames); - 
contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().persist(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), - toBeReversedDataSourcePropsMap); + Map toBeReversedPropsMap = getToBeReversedDataSourcePoolPropertiesMap(propsMap, toBeDroppedStorageUnitNames); + contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().persist( + contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), toBeReversedPropsMap); switchingResource.closeStaleDataSources(); clearServiceCache(); } - private Map getToBeDeletedDataSourcePropsMap(final Map dataSourcePropsMap, final Collection toBeDroppedResourceNames) { - return dataSourcePropsMap.entrySet().stream().filter(entry -> toBeDroppedResourceNames.contains(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); + private Map getToBeDeletedPropertiesMap(final Map propsMap, final Collection toBeDroppedResourceNames) { + return propsMap.entrySet().stream().filter(entry -> toBeDroppedResourceNames.contains(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); } - private Map getToBeReversedDataSourcePropsMap(final Map dataSourcePropsMap, final Collection toBeDroppedResourceNames) { - return dataSourcePropsMap.entrySet().stream().filter(entry -> !toBeDroppedResourceNames.contains(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); + private Map getToBeReversedDataSourcePoolPropertiesMap(final Map propsMap, final Collection toBeDroppedResourceNames) { + return propsMap.entrySet().stream().filter(entry -> !toBeDroppedResourceNames.contains(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); } @Override diff --git a/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/StandaloneModeContextManager.java 
b/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/StandaloneModeContextManager.java index 25ba07217d691..2e94198259ca2 100644 --- a/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/StandaloneModeContextManager.java +++ b/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/StandaloneModeContextManager.java @@ -19,7 +19,7 @@ import com.google.common.base.Strings; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; @@ -204,40 +204,40 @@ private void refreshMetaDataHeldRule(final ShardingSphereDatabase database) { } @Override - public void registerStorageUnits(final String databaseName, final Map toBeRegisterStorageUnitProps) throws SQLException { + public void registerStorageUnits(final String databaseName, final Map toBeRegisteredProps) throws SQLException { SwitchingResource switchingResource = - new ResourceSwitchManager().create(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeRegisterStorageUnitProps); + new ResourceSwitchManager().create(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeRegisteredProps); contextManager.getMetaDataContexts().getMetaData().getDatabases().putAll(contextManager.getConfigurationContextManager().createChangedDatabases(databaseName, false, switchingResource, null)); contextManager.getMetaDataContexts().getMetaData().getGlobalRuleMetaData().findRules(ResourceHeldRule.class) 
.forEach(each -> each.addResource(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName))); contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getSchemas() .forEach((schemaName, schema) -> contextManager.getMetaDataContexts().getPersistService().getDatabaseMetaDataService() .persist(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), schemaName, schema)); - contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), - toBeRegisterStorageUnitProps); + contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append( + contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), toBeRegisteredProps); clearServiceCache(); } @Override - public void alterStorageUnits(final String databaseName, final Map toBeUpdatedStorageUnitProps) throws SQLException { + public void alterStorageUnits(final String databaseName, final Map toBeUpdatedProps) throws SQLException { SwitchingResource switchingResource = - new ResourceSwitchManager().create(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeUpdatedStorageUnitProps); + new ResourceSwitchManager().create(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeUpdatedProps); contextManager.getMetaDataContexts().getMetaData().getDatabases().putAll(contextManager.getConfigurationContextManager().createChangedDatabases(databaseName, true, switchingResource, null)); contextManager.getMetaDataContexts().getMetaData().getGlobalRuleMetaData().findRules(ResourceHeldRule.class) .forEach(each -> each.addResource(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName))); - 
contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), - toBeUpdatedStorageUnitProps); + contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append( + contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), toBeUpdatedProps); switchingResource.closeStaleDataSources(); clearServiceCache(); } @Override public void unregisterStorageUnits(final String databaseName, final Collection toBeDroppedStorageUnitNames) throws SQLException { - Map dataSourcePropsMap = contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService() + Map propsMap = contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService() .load(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName()); - Map toBeDeletedDataSourcePropsMap = getToBeDeletedDataSourcePropsMap(dataSourcePropsMap, toBeDroppedStorageUnitNames); + Map toBeDeletedPropsMap = getToBeDeletedDataSourcePoolPropertiesMap(propsMap, toBeDroppedStorageUnitNames); SwitchingResource switchingResource = - new ResourceSwitchManager().createByDropResource(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeDeletedDataSourcePropsMap); + new ResourceSwitchManager().createByDropResource(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeDeletedPropsMap); contextManager.getMetaDataContexts().getMetaData().getDatabases() .putAll(contextManager.getConfigurationContextManager().renewDatabase(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName), switchingResource)); MetaDataContexts reloadMetaDataContexts = contextManager.getConfigurationContextManager().createMetaDataContexts(databaseName, false, switchingResource, null); @@ -245,19 +245,19 @@ public void 
unregisterStorageUnits(final String databaseName, final Collection toBeReversedDataSourcePropsMap = getToBeReversedDataSourcePropsMap(dataSourcePropsMap, toBeDroppedStorageUnitNames); + Map toBeReversedPropsMap = getToBeReversedDataSourcePoolPropertiesMap(propsMap, toBeDroppedStorageUnitNames); contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().persist(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), - toBeReversedDataSourcePropsMap); + toBeReversedPropsMap); switchingResource.closeStaleDataSources(); clearServiceCache(); } - private Map getToBeDeletedDataSourcePropsMap(final Map dataSourcePropsMap, final Collection toBeDroppedResourceNames) { - return dataSourcePropsMap.entrySet().stream().filter(entry -> toBeDroppedResourceNames.contains(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); + private Map getToBeDeletedDataSourcePoolPropertiesMap(final Map propsMap, final Collection toBeDroppedResourceNames) { + return propsMap.entrySet().stream().filter(entry -> toBeDroppedResourceNames.contains(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); } - private Map getToBeReversedDataSourcePropsMap(final Map dataSourcePropsMap, final Collection toBeDroppedResourceNames) { - return dataSourcePropsMap.entrySet().stream().filter(entry -> !toBeDroppedResourceNames.contains(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); + private Map getToBeReversedDataSourcePoolPropertiesMap(final Map propsMap, final Collection toBeDroppedResourceNames) { + return propsMap.entrySet().stream().filter(entry -> !toBeDroppedResourceNames.contains(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); } @Override diff --git a/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/parser/segment/converter/DataSourceSegmentsConverter.java 
b/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/parser/segment/converter/DataSourceSegmentsConverter.java index 34f5a0d24fdbd..73bd228cf54d4 100644 --- a/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/parser/segment/converter/DataSourceSegmentsConverter.java +++ b/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/parser/segment/converter/DataSourceSegmentsConverter.java @@ -23,7 +23,7 @@ import org.apache.shardingsphere.distsql.parser.segment.HostnameAndPortBasedDataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.URLBasedDataSourceSegment; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import java.util.Collection; import java.util.LinkedHashMap; @@ -42,10 +42,10 @@ public final class DataSourceSegmentsConverter { * @param dataSourceSegments data source segments * @return data source properties map */ - public static Map convert(final DatabaseType databaseType, final Collection dataSourceSegments) { - Map result = new LinkedHashMap<>(dataSourceSegments.size(), 1F); + public static Map convert(final DatabaseType databaseType, final Collection dataSourceSegments) { + Map result = new LinkedHashMap<>(dataSourceSegments.size(), 1F); for (DataSourceSegment each : dataSourceSegments) { - result.put(each.getName(), new DataSourceProperties("com.zaxxer.hikari.HikariDataSource", createProperties(databaseType, each))); + result.put(each.getName(), new DataSourcePoolProperties("com.zaxxer.hikari.HikariDataSource", createProperties(databaseType, each))); } return result; } diff --git a/parser/distsql/statement/src/test/java/org/apache/shardingsphere/distsql/parser/segment/converter/DataSourceSegmentsConverterTest.java 
b/parser/distsql/statement/src/test/java/org/apache/shardingsphere/distsql/parser/segment/converter/DataSourceSegmentsConverterTest.java index 58b4adfc5b4cb..1c90f7b035de4 100644 --- a/parser/distsql/statement/src/test/java/org/apache/shardingsphere/distsql/parser/segment/converter/DataSourceSegmentsConverterTest.java +++ b/parser/distsql/statement/src/test/java/org/apache/shardingsphere/distsql/parser/segment/converter/DataSourceSegmentsConverterTest.java @@ -21,7 +21,7 @@ import org.apache.shardingsphere.distsql.parser.segment.HostnameAndPortBasedDataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.URLBasedDataSourceSegment; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.test.util.PropertiesBuilder; import org.apache.shardingsphere.test.util.PropertiesBuilder.Property; @@ -41,7 +41,7 @@ class DataSourceSegmentsConverterTest { @Test void assertConvert() { - Map actual = DataSourceSegmentsConverter.convert(TypedSPILoader.getService(DatabaseType.class, "MySQL"), createDataSourceSegments()); + Map actual = DataSourceSegmentsConverter.convert(TypedSPILoader.getService(DatabaseType.class, "MySQL"), createDataSourceSegments()); assertThat(actual.size(), is(2)); assertTrue(actual.keySet().containsAll(Arrays.asList("ds0", "ds1"))); assertThat(actual.values().iterator().next().getAllLocalProperties().get("username"), is("root0")); diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/config/yaml/swapper/YamlProxyConfigurationSwapper.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/config/yaml/swapper/YamlProxyConfigurationSwapper.java index 2355e15f8ef69..1be11bde6b506 100644 --- 
a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/config/yaml/swapper/YamlProxyConfigurationSwapper.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/config/yaml/swapper/YamlProxyConfigurationSwapper.java @@ -22,7 +22,7 @@ import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.yaml.config.swapper.rule.YamlRuleConfigurationSwapperEngine; import org.apache.shardingsphere.proxy.backend.config.ProxyConfiguration; import org.apache.shardingsphere.proxy.backend.config.ProxyGlobalConfiguration; @@ -66,7 +66,7 @@ private ProxyGlobalConfiguration swapGlobalConfiguration(final YamlProxyServerCo private Map swapDataSources(final Map yamlDataSourceConfigs) { Map dataSourceConfigs = swapDataSourceConfigurations(yamlDataSourceConfigs); - return DataSourcePoolCreator.create(DataSourcePropertiesCreator.createFromConfiguration(dataSourceConfigs)); + return DataSourcePoolCreator.create(DataSourcePoolPropertiesCreator.createFromConfiguration(dataSourceConfigs)); } private Map swapDatabaseConfigurations(final Map databaseConfigurations) { diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ConvertYamlConfigurationExecutor.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ConvertYamlConfigurationExecutor.java index 0e0ce65789d1b..d2c1d5cd24b12 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ConvertYamlConfigurationExecutor.java +++ 
b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ConvertYamlConfigurationExecutor.java @@ -26,9 +26,9 @@ import org.apache.shardingsphere.encrypt.api.config.CompatibleEncryptRuleConfiguration; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesCreator; -import org.apache.shardingsphere.infra.datasource.pool.props.custom.CustomDataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.custom.CustomDataSourcePoolProperties; import org.apache.shardingsphere.infra.datasource.pool.props.synonym.PoolPropertySynonyms; import org.apache.shardingsphere.infra.merge.result.impl.local.LocalDataQueryResultRow; import org.apache.shardingsphere.infra.spi.type.ordered.OrderedSPILoader; @@ -121,8 +121,8 @@ private void appendResources(final Map while (iterator.hasNext()) { Entry entry = iterator.next(); DataSourceConfiguration dataSourceConfig = dataSourceConfigSwapper.swap(entry.getValue()); - DataSourceProperties dataSourceProps = DataSourcePropertiesCreator.create(dataSourceConfig); - appendResource(entry.getKey(), dataSourceProps, stringBuilder); + DataSourcePoolProperties props = DataSourcePoolPropertiesCreator.create(dataSourceConfig); + appendResource(entry.getKey(), props, stringBuilder); if (iterator.hasNext()) { stringBuilder.append(DistSQLScriptConstants.COMMA); } @@ -130,12 +130,12 @@ private void appendResources(final Map stringBuilder.append(DistSQLScriptConstants.SEMI).append(System.lineSeparator()).append(System.lineSeparator()); } - 
private void appendResource(final String resourceName, final DataSourceProperties dataSourceProps, final StringBuilder stringBuilder) { - Map connectionProps = dataSourceProps.getConnectionPropertySynonyms().getStandardProperties(); + private void appendResource(final String resourceName, final DataSourcePoolProperties dataSourcePoolProps, final StringBuilder stringBuilder) { + Map connectionProps = dataSourcePoolProps.getConnectionPropertySynonyms().getStandardProperties(); String url = (String) connectionProps.get(DistSQLScriptConstants.KEY_URL); String username = (String) connectionProps.get(DistSQLScriptConstants.KEY_USERNAME); String password = (String) connectionProps.get(DistSQLScriptConstants.KEY_PASSWORD); - String props = getResourceProperties(dataSourceProps.getPoolPropertySynonyms(), dataSourceProps.getCustomDataSourceProperties()); + String props = getResourceProperties(dataSourcePoolProps.getPoolPropertySynonyms(), dataSourcePoolProps.getCustomDataSourcePoolProperties()); if (Strings.isNullOrEmpty(password)) { stringBuilder.append(String.format(DistSQLScriptConstants.RESOURCE_DEFINITION_WITHOUT_PASSWORD, resourceName, url, username, props)); } else { @@ -143,12 +143,12 @@ private void appendResource(final String resourceName, final DataSourcePropertie } } - private String getResourceProperties(final PoolPropertySynonyms poolPropertySynonyms, final CustomDataSourceProperties customDataSourceProps) { + private String getResourceProperties(final PoolPropertySynonyms poolPropertySynonyms, final CustomDataSourcePoolProperties customDataSourcePoolProps) { StringBuilder result = new StringBuilder(); appendProperties(poolPropertySynonyms.getStandardProperties(), result); - if (!customDataSourceProps.getProperties().isEmpty()) { + if (!customDataSourcePoolProps.getProperties().isEmpty()) { result.append(DistSQLScriptConstants.COMMA); - appendProperties(customDataSourceProps.getProperties(), result); + appendProperties(customDataSourcePoolProps.getProperties(), 
result); } return result.toString(); } diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportStorageNodesExecutor.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportStorageNodesExecutor.java index c8f2b3c351dd6..c0e4c5dee31dc 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportStorageNodesExecutor.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportStorageNodesExecutor.java @@ -20,7 +20,7 @@ import org.apache.shardingsphere.distsql.handler.ral.query.MetaDataRequiredQueryableRALExecutor; import org.apache.shardingsphere.distsql.parser.statement.ral.queryable.ExportStorageNodesStatement; import org.apache.shardingsphere.infra.database.core.connector.ConnectionProperties; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.merge.result.impl.local.LocalDataQueryResultRow; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; @@ -93,7 +93,7 @@ private Map> generateDatabaseExportStora if (storageNodes.containsKey(databaseInstanceIp)) { continue; } - Map standardProperties = DataSourcePropertiesCreator.create(entry.getValue()).getConnectionPropertySynonyms().getStandardProperties(); + Map standardProperties = DataSourcePoolPropertiesCreator.create(entry.getValue()).getConnectionPropertySynonyms().getStandardProperties(); ExportedStorageNode exportedStorageNode = new ExportedStorageNode(connectionProps.getHostname(), String.valueOf(connectionProps.getPort()), String.valueOf(standardProperties.get("username")), 
String.valueOf(standardProperties.get("password")), connectionProps.getCatalog()); storageNodes.put(databaseInstanceIp, exportedStorageNode); diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/AlterStorageUnitBackendHandler.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/AlterStorageUnitBackendHandler.java index 43ad53c521dca..f831defc1cd63 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/AlterStorageUnitBackendHandler.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/AlterStorageUnitBackendHandler.java @@ -21,7 +21,7 @@ import org.apache.shardingsphere.distsql.handler.exception.storageunit.DuplicateStorageUnitException; import org.apache.shardingsphere.distsql.handler.exception.storageunit.InvalidStorageUnitsException; import org.apache.shardingsphere.distsql.handler.exception.storageunit.MissingRequiredStorageUnitsException; -import org.apache.shardingsphere.distsql.handler.validate.DataSourcePropertiesValidateHandler; +import org.apache.shardingsphere.distsql.handler.validate.DataSourcePoolPropertiesValidateHandler; import org.apache.shardingsphere.distsql.parser.segment.DataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.HostnameAndPortBasedDataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.URLBasedDataSourceSegment; @@ -30,8 +30,8 @@ import org.apache.shardingsphere.infra.database.core.connector.url.JdbcUrl; import org.apache.shardingsphere.infra.database.core.connector.url.StandardJdbcUrlParser; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesCreator; +import 
org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.exception.core.external.ShardingSphereExternalException; import org.apache.shardingsphere.proxy.backend.context.ProxyContext; @@ -56,21 +56,21 @@ public final class AlterStorageUnitBackendHandler extends StorageUnitDefinitionB private final DatabaseType databaseType; - private final DataSourcePropertiesValidateHandler validateHandler; + private final DataSourcePoolPropertiesValidateHandler validateHandler; public AlterStorageUnitBackendHandler(final AlterStorageUnitStatement sqlStatement, final ConnectionSession connectionSession) { super(sqlStatement, connectionSession); databaseType = connectionSession.getProtocolType(); - validateHandler = new DataSourcePropertiesValidateHandler(); + validateHandler = new DataSourcePoolPropertiesValidateHandler(); } @Override public ResponseHeader execute(final String databaseName, final AlterStorageUnitStatement sqlStatement) { checkSQLStatement(databaseName, sqlStatement); - Map dataSourcePropsMap = DataSourceSegmentsConverter.convert(databaseType, sqlStatement.getStorageUnits()); - validateHandler.validate(dataSourcePropsMap); + Map propsMap = DataSourceSegmentsConverter.convert(databaseType, sqlStatement.getStorageUnits()); + validateHandler.validate(propsMap); try { - ProxyContext.getInstance().getContextManager().getInstanceContext().getModeContextManager().alterStorageUnits(databaseName, dataSourcePropsMap); + ProxyContext.getInstance().getContextManager().getInstanceContext().getModeContextManager().alterStorageUnits(databaseName, propsMap); } catch (final SQLException | ShardingSphereExternalException ex) { log.error("Alter storage unit failed", ex); throw new InvalidStorageUnitsException(Collections.singleton(ex.getMessage())); 
@@ -128,7 +128,7 @@ private boolean isIdenticalDatabase(final DataSourceSegment segment, final DataS port = String.valueOf(segmentJdbcUrl.getPort()); database = segmentJdbcUrl.getDatabase(); } - String url = String.valueOf(DataSourcePropertiesCreator.create(dataSource).getConnectionPropertySynonyms().getStandardProperties().get("url")); + String url = String.valueOf(DataSourcePoolPropertiesCreator.create(dataSource).getConnectionPropertySynonyms().getStandardProperties().get("url")); JdbcUrl dataSourceJdbcUrl = new StandardJdbcUrlParser().parse(url); return Objects.equals(hostName, dataSourceJdbcUrl.getHostname()) && Objects.equals(port, String.valueOf(dataSourceJdbcUrl.getPort())) && Objects.equals(database, dataSourceJdbcUrl.getDatabase()); diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/RegisterStorageUnitBackendHandler.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/RegisterStorageUnitBackendHandler.java index ef292f2a5877d..5cfe30192c35e 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/RegisterStorageUnitBackendHandler.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/RegisterStorageUnitBackendHandler.java @@ -20,12 +20,12 @@ import lombok.extern.slf4j.Slf4j; import org.apache.shardingsphere.distsql.handler.exception.storageunit.DuplicateStorageUnitException; import org.apache.shardingsphere.distsql.handler.exception.storageunit.InvalidStorageUnitsException; -import org.apache.shardingsphere.distsql.handler.validate.DataSourcePropertiesValidateHandler; +import org.apache.shardingsphere.distsql.handler.validate.DataSourcePoolPropertiesValidateHandler; import org.apache.shardingsphere.distsql.parser.segment.DataSourceSegment; import 
org.apache.shardingsphere.distsql.parser.segment.converter.DataSourceSegmentsConverter; import org.apache.shardingsphere.distsql.parser.statement.rdl.create.RegisterStorageUnitStatement; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.rule.identifier.type.DataSourceContainedRule; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.exception.core.external.ShardingSphereExternalException; @@ -50,30 +50,30 @@ public final class RegisterStorageUnitBackendHandler extends StorageUnitDefiniti private final DatabaseType databaseType; - private final DataSourcePropertiesValidateHandler validateHandler; + private final DataSourcePoolPropertiesValidateHandler validateHandler; public RegisterStorageUnitBackendHandler(final RegisterStorageUnitStatement sqlStatement, final ConnectionSession connectionSession) { super(sqlStatement, connectionSession); databaseType = connectionSession.getProtocolType(); - validateHandler = new DataSourcePropertiesValidateHandler(); + validateHandler = new DataSourcePoolPropertiesValidateHandler(); } @Override public ResponseHeader execute(final String databaseName, final RegisterStorageUnitStatement sqlStatement) { checkSQLStatement(databaseName, sqlStatement); - Map dataSourcePropsMap = DataSourceSegmentsConverter.convert(databaseType, sqlStatement.getStorageUnits()); + Map propsMap = DataSourceSegmentsConverter.convert(databaseType, sqlStatement.getStorageUnits()); if (sqlStatement.isIfNotExists()) { Collection currentStorageUnits = getCurrentStorageUnitNames(databaseName); Collection logicalDataSourceNames = getLogicalDataSourceNames(databaseName); - dataSourcePropsMap.keySet().removeIf(currentStorageUnits::contains); - 
dataSourcePropsMap.keySet().removeIf(logicalDataSourceNames::contains); + propsMap.keySet().removeIf(currentStorageUnits::contains); + propsMap.keySet().removeIf(logicalDataSourceNames::contains); } - if (dataSourcePropsMap.isEmpty()) { + if (propsMap.isEmpty()) { return new UpdateResponseHeader(sqlStatement); } - validateHandler.validate(dataSourcePropsMap); + validateHandler.validate(propsMap); try { - ProxyContext.getInstance().getContextManager().getInstanceContext().getModeContextManager().registerStorageUnits(databaseName, dataSourcePropsMap); + ProxyContext.getInstance().getContextManager().getInstanceContext().getModeContextManager().registerStorageUnits(databaseName, propsMap); } catch (final SQLException | ShardingSphereExternalException ex) { log.error("Register storage unit failed", ex); throw new InvalidStorageUnitsException(Collections.singleton(ex.getMessage())); diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutor.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutor.java index a2d90487b2ea8..51a72486d4a30 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutor.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutor.java @@ -25,8 +25,8 @@ import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; import org.apache.shardingsphere.infra.datasource.CatalogSwitchableDataSource; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesCreator; +import 
org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.merge.result.impl.local.LocalDataQueryResultRow; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; @@ -67,14 +67,13 @@ public Collection getColumnNames() { @Override public Collection getRows(final ShardingSphereDatabase database, final ShowStorageUnitsStatement sqlStatement) { ResourceMetaData resourceMetaData = database.getResourceMetaData(); - Map dataSourcePropsMap = getDataSourcePropsMap(database, sqlStatement); Collection result = new LinkedList<>(); - for (Entry entry : dataSourcePropsMap.entrySet()) { + for (Entry entry : getDataSourcePoolPropertiesMap(database, sqlStatement).entrySet()) { String key = entry.getKey(); - DataSourceProperties dataSourceProps = entry.getValue(); + DataSourcePoolProperties props = entry.getValue(); ConnectionProperties connectionProps = resourceMetaData.getConnectionProperties(key); - Map standardProps = dataSourceProps.getPoolPropertySynonyms().getStandardProperties(); - Map otherProps = dataSourceProps.getCustomDataSourceProperties().getProperties(); + Map standardProps = props.getPoolPropertySynonyms().getStandardProperties(); + Map otherProps = props.getCustomDataSourcePoolProperties().getProperties(); result.add(new LocalDataQueryResultRow(key, resourceMetaData.getStorageType(key).getType(), connectionProps.getHostname(), @@ -91,34 +90,34 @@ public Collection getRows(final ShardingSphereDatabase return result; } - private Map getDataSourcePropsMap(final ShardingSphereDatabase database, final ShowStorageUnitsStatement sqlStatement) { - Map result = new LinkedHashMap<>(database.getResourceMetaData().getDataSources().size(), 1F); - Map dataSourcePropsMap = 
database.getResourceMetaData().getStorageUnitMetaData().getDataSourcePropsMap(); + private Map getDataSourcePoolPropertiesMap(final ShardingSphereDatabase database, final ShowStorageUnitsStatement sqlStatement) { + Map result = new LinkedHashMap<>(database.getResourceMetaData().getDataSources().size(), 1F); + Map propsMap = database.getResourceMetaData().getStorageUnitMetaData().getDataSourcePoolPropertiesMap(); Map storageTypes = database.getResourceMetaData().getStorageTypes(); - Optional usageCountOptional = sqlStatement.getUsageCount(); - if (usageCountOptional.isPresent()) { + Optional usageCount = sqlStatement.getUsageCount(); + if (usageCount.isPresent()) { Map> inUsedStorageUnits = StorageUnitUtils.getInUsedStorageUnits(database.getRuleMetaData(), database.getResourceMetaData().getDataSources().size()); for (Entry entry : database.getResourceMetaData().getDataSources().entrySet()) { Integer currentUsageCount = inUsedStorageUnits.containsKey(entry.getKey()) ? inUsedStorageUnits.get(entry.getKey()).size() : 0; - if (usageCountOptional.get().equals(currentUsageCount)) { - result.put(entry.getKey(), getDataSourceProperties(dataSourcePropsMap, entry.getKey(), storageTypes.get(entry.getKey()), entry.getValue())); + if (usageCount.get().equals(currentUsageCount)) { + result.put(entry.getKey(), getDataSourcePoolProperties(propsMap, entry.getKey(), storageTypes.get(entry.getKey()), entry.getValue())); } } } else { for (Entry entry : database.getResourceMetaData().getDataSources().entrySet()) { - result.put(entry.getKey(), getDataSourceProperties(dataSourcePropsMap, entry.getKey(), storageTypes.get(entry.getKey()), entry.getValue())); + result.put(entry.getKey(), getDataSourcePoolProperties(propsMap, entry.getKey(), storageTypes.get(entry.getKey()), entry.getValue())); } } return result; } - private DataSourceProperties getDataSourceProperties(final Map dataSourcePropsMap, final String storageUnitName, - final DatabaseType databaseType, final DataSource dataSource) { 
- DataSourceProperties result = getDataSourceProperties(dataSource); + private DataSourcePoolProperties getDataSourcePoolProperties(final Map propsMap, final String storageUnitName, + final DatabaseType databaseType, final DataSource dataSource) { + DataSourcePoolProperties result = getDataSourcePoolProperties(dataSource); DialectDatabaseMetaData dialectDatabaseMetaData = new DatabaseTypeRegistry(databaseType).getDialectDatabaseMetaData(); - if (dialectDatabaseMetaData.isInstanceConnectionAvailable() && dataSourcePropsMap.containsKey(storageUnitName)) { - DataSourceProperties unitDataSourceProperties = dataSourcePropsMap.get(storageUnitName); - for (Entry entry : unitDataSourceProperties.getPoolPropertySynonyms().getStandardProperties().entrySet()) { + if (dialectDatabaseMetaData.isInstanceConnectionAvailable() && propsMap.containsKey(storageUnitName)) { + DataSourcePoolProperties unitDataSourcePoolProperties = propsMap.get(storageUnitName); + for (Entry entry : unitDataSourcePoolProperties.getPoolPropertySynonyms().getStandardProperties().entrySet()) { if (null != entry.getValue()) { result.getPoolPropertySynonyms().getStandardProperties().put(entry.getKey(), entry.getValue()); } @@ -127,10 +126,10 @@ private DataSourceProperties getDataSourceProperties(final Map standardProps, final String key) { diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/util/ExportUtils.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/util/ExportUtils.java index 4f2409acb9220..ff081ca10643f 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/util/ExportUtils.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/util/ExportUtils.java @@ -21,7 +21,7 @@ import lombok.NoArgsConstructor; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; import org.apache.shardingsphere.infra.config.rule.scope.DatabaseRuleConfiguration; -import 
org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.spi.type.ordered.OrderedSPILoader; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; @@ -83,20 +83,20 @@ private static void appendDatabaseName(final String databaseName, final StringBu } private static void appendDataSourceConfigurations(final ShardingSphereDatabase database, final StringBuilder stringBuilder) { - if (database.getResourceMetaData().getStorageUnitMetaData().getDataSourcePropsMap().isEmpty()) { + if (database.getResourceMetaData().getStorageUnitMetaData().getDataSourcePoolPropertiesMap().isEmpty()) { return; } stringBuilder.append("dataSources:").append(System.lineSeparator()); - for (Entry entry : database.getResourceMetaData().getStorageUnitMetaData().getDataSourcePropsMap().entrySet()) { + for (Entry entry : database.getResourceMetaData().getStorageUnitMetaData().getDataSourcePoolPropertiesMap().entrySet()) { appendDataSourceConfiguration(entry.getKey(), entry.getValue(), stringBuilder); } } - private static void appendDataSourceConfiguration(final String name, final DataSourceProperties dataSourceProps, final StringBuilder stringBuilder) { + private static void appendDataSourceConfiguration(final String name, final DataSourcePoolProperties props, final StringBuilder stringBuilder) { stringBuilder.append(" ").append(name).append(':').append(System.lineSeparator()); - dataSourceProps.getConnectionPropertySynonyms().getStandardProperties() + props.getConnectionPropertySynonyms().getStandardProperties() .forEach((key, value) -> stringBuilder.append(" ").append(key).append(": ").append(value).append(System.lineSeparator())); - for (Entry entry : dataSourceProps.getPoolPropertySynonyms().getStandardProperties().entrySet()) { + for (Entry entry : 
props.getPoolPropertySynonyms().getStandardProperties().entrySet()) { if (null != entry.getValue()) { stringBuilder.append(" ").append(entry.getKey()).append(": ").append(entry.getValue()).append(System.lineSeparator()); } diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/util/YamlDatabaseConfigurationImportExecutor.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/util/YamlDatabaseConfigurationImportExecutor.java index d1c12f51c64e0..2251b968c36cd 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/util/YamlDatabaseConfigurationImportExecutor.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/util/YamlDatabaseConfigurationImportExecutor.java @@ -24,7 +24,7 @@ import org.apache.shardingsphere.infra.exception.core.external.sql.type.kernel.category.DistSQLException; import org.apache.shardingsphere.distsql.handler.exception.datasource.MissingRequiredDataSourcesException; import org.apache.shardingsphere.distsql.handler.exception.storageunit.InvalidStorageUnitsException; -import org.apache.shardingsphere.distsql.handler.validate.DataSourcePropertiesValidateHandler; +import org.apache.shardingsphere.distsql.handler.validate.DataSourcePoolPropertiesValidateHandler; import org.apache.shardingsphere.encrypt.api.config.CompatibleEncryptRuleConfiguration; import org.apache.shardingsphere.encrypt.api.config.EncryptRuleConfiguration; import org.apache.shardingsphere.encrypt.rule.EncryptRule; @@ -37,8 +37,8 @@ import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesCreator; +import 
org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.instance.InstanceContext; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.rule.ShardingSphereRule; @@ -106,7 +106,7 @@ public final class YamlDatabaseConfigurationImportExecutor { private final YamlProxyDataSourceConfigurationSwapper dataSourceConfigSwapper = new YamlProxyDataSourceConfigurationSwapper(); - private final DataSourcePropertiesValidateHandler validateHandler = new DataSourcePropertiesValidateHandler(); + private final DataSourcePoolPropertiesValidateHandler validateHandler = new DataSourcePoolPropertiesValidateHandler(); /** * Import proxy database from yaml configuration. @@ -147,19 +147,19 @@ private void addDatabase(final String databaseName) { } private void addResources(final String databaseName, final Map yamlDataSourceMap) { - Map dataSourcePropsMap = new LinkedHashMap<>(yamlDataSourceMap.size(), 1F); + Map propsMap = new LinkedHashMap<>(yamlDataSourceMap.size(), 1F); for (Entry entry : yamlDataSourceMap.entrySet()) { DataSourceConfiguration dataSourceConfig = dataSourceConfigSwapper.swap(entry.getValue()); - dataSourcePropsMap.put(entry.getKey(), DataSourcePropertiesCreator.create(dataSourceConfig)); + propsMap.put(entry.getKey(), DataSourcePoolPropertiesCreator.create(dataSourceConfig)); } - validateHandler.validate(dataSourcePropsMap); + validateHandler.validate(propsMap); try { - ProxyContext.getInstance().getContextManager().getInstanceContext().getModeContextManager().registerStorageUnits(databaseName, dataSourcePropsMap); + ProxyContext.getInstance().getContextManager().getInstanceContext().getModeContextManager().registerStorageUnits(databaseName, propsMap); } catch (final SQLException ex) { throw new 
InvalidStorageUnitsException(Collections.singleton(ex.getMessage())); } Map dataSource = ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData().getDataSources(); - dataSourcePropsMap.forEach((key, value) -> dataSource.put(key, DataSourcePoolCreator.create(value))); + propsMap.forEach((key, value) -> dataSource.put(key, DataSourcePoolCreator.create(value))); } private void addRules(final String databaseName, final Collection yamlRuleConfigs) { diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportDatabaseConfigurationExecutorTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportDatabaseConfigurationExecutorTest.java index ef9a05ea7fd54..9bde4ffd498fe 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportDatabaseConfigurationExecutorTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportDatabaseConfigurationExecutorTest.java @@ -20,7 +20,7 @@ import lombok.SneakyThrows; import org.apache.shardingsphere.distsql.parser.statement.ral.queryable.ExportDatabaseConfigurationStatement; import org.apache.shardingsphere.infra.config.algorithm.AlgorithmConfiguration; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.merge.result.impl.local.LocalDataQueryResultRow; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.sharding.api.config.ShardingRuleConfiguration; @@ -66,7 +66,7 @@ void assertGetColumns() { @Test void assertExecute() { when(database.getName()).thenReturn("normal_db"); - 
when(database.getResourceMetaData().getStorageUnitMetaData().getDataSourcePropsMap()).thenReturn(DataSourcePropertiesCreator.create(createDataSourceMap())); + when(database.getResourceMetaData().getStorageUnitMetaData().getDataSourcePoolPropertiesMap()).thenReturn(DataSourcePoolPropertiesCreator.create(createDataSourceMap())); when(database.getRuleMetaData().getConfigurations()).thenReturn(Collections.singleton(createShardingRuleConfiguration())); Collection actual = new ExportDatabaseConfigurationExecutor().getRows(database, new ExportDatabaseConfigurationStatement(mock(DatabaseSegment.class), null)); assertThat(actual.size(), is(1)); @@ -77,7 +77,7 @@ void assertExecute() { @Test void assertExecuteWithEmptyDatabase() { when(database.getName()).thenReturn("empty_db"); - when(database.getResourceMetaData().getStorageUnitMetaData().getDataSourcePropsMap()).thenReturn(Collections.emptyMap()); + when(database.getResourceMetaData().getStorageUnitMetaData().getDataSourcePoolPropertiesMap()).thenReturn(Collections.emptyMap()); when(database.getRuleMetaData().getConfigurations()).thenReturn(Collections.emptyList()); ExportDatabaseConfigurationStatement sqlStatement = new ExportDatabaseConfigurationStatement(new DatabaseSegment(0, 0, new IdentifierValue("empty_db")), null); Collection actual = new ExportDatabaseConfigurationExecutor().getRows(database, sqlStatement); diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportMetaDataExecutorTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportMetaDataExecutorTest.java index 5166d3ef01573..5005b20fa270c 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportMetaDataExecutorTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportMetaDataExecutorTest.java @@ -27,7 
+27,7 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.config.props.ConfigurationPropertyKey; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.instance.ComputeNodeInstance; import org.apache.shardingsphere.infra.instance.InstanceContext; import org.apache.shardingsphere.infra.instance.metadata.InstanceMetaData; @@ -107,7 +107,7 @@ void assertExecuteWithEmptyMetaData() { when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); when(ProxyContext.getInstance().getAllDatabaseNames()).thenReturn(Collections.singleton("empty_metadata")); when(database.getResourceMetaData().getAllInstanceDataSourceNames()).thenReturn(Collections.singleton("empty_metadata")); - when(database.getResourceMetaData().getStorageUnitMetaData().getDataSourcePropsMap()).thenReturn(Collections.emptyMap()); + when(database.getResourceMetaData().getStorageUnitMetaData().getDataSourcePoolPropertiesMap()).thenReturn(Collections.emptyMap()); when(database.getRuleMetaData().getConfigurations()).thenReturn(Collections.emptyList()); ExportMetaDataStatement sqlStatement = new ExportMetaDataStatement(null); Collection actual = new ExportMetaDataExecutor().getRows(contextManager.getMetaDataContexts().getMetaData(), sqlStatement); @@ -131,7 +131,7 @@ void assertExecute() { when(database.getName()).thenReturn("normal_db"); when(database.getResourceMetaData().getAllInstanceDataSourceNames()).thenReturn(Collections.singleton("empty_metadata")); Map dataSourceMap = createDataSourceMap(); - when(database.getResourceMetaData().getStorageUnitMetaData().getDataSourcePropsMap()).thenReturn(DataSourcePropertiesCreator.create(dataSourceMap)); + 
when(database.getResourceMetaData().getStorageUnitMetaData().getDataSourcePoolPropertiesMap()).thenReturn(DataSourcePoolPropertiesCreator.create(dataSourceMap)); when(database.getRuleMetaData().getConfigurations()).thenReturn(Collections.emptyList()); ContextManager contextManager = mockContextManager(); when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/ImportDatabaseConfigurationUpdaterTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/ImportDatabaseConfigurationUpdaterTest.java index 97b42e17c20df..14d3f6b6a1b11 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/ImportDatabaseConfigurationUpdaterTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/ImportDatabaseConfigurationUpdaterTest.java @@ -20,7 +20,7 @@ import lombok.SneakyThrows; import org.apache.shardingsphere.distsql.handler.exception.datasource.MissingRequiredDataSourcesException; import org.apache.shardingsphere.distsql.handler.exception.rule.DuplicateRuleException; -import org.apache.shardingsphere.distsql.handler.validate.DataSourcePropertiesValidateHandler; +import org.apache.shardingsphere.distsql.handler.validate.DataSourcePoolPropertiesValidateHandler; import org.apache.shardingsphere.distsql.parser.statement.ral.updatable.ImportDatabaseConfigurationStatement; import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.config.props.ConfigurationPropertyKey; @@ -119,7 +119,7 @@ private void init(final String databaseName) { importDatabaseConfigUpdater = new ImportDatabaseConfigurationUpdater(); YamlDatabaseConfigurationImportExecutor databaseConfigImportExecutor = new 
YamlDatabaseConfigurationImportExecutor(); Plugins.getMemberAccessor().set(importDatabaseConfigUpdater.getClass().getDeclaredField("databaseConfigImportExecutor"), importDatabaseConfigUpdater, databaseConfigImportExecutor); - Plugins.getMemberAccessor().set(databaseConfigImportExecutor.getClass().getDeclaredField("validateHandler"), databaseConfigImportExecutor, mock(DataSourcePropertiesValidateHandler.class)); + Plugins.getMemberAccessor().set(databaseConfigImportExecutor.getClass().getDeclaredField("validateHandler"), databaseConfigImportExecutor, mock(DataSourcePoolPropertiesValidateHandler.class)); } private ContextManager mockContextManager(final String databaseName) { diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/AlterStorageUnitBackendHandlerTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/AlterStorageUnitBackendHandlerTest.java index 15ea177183afd..3820321cb3637 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/AlterStorageUnitBackendHandlerTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/AlterStorageUnitBackendHandlerTest.java @@ -21,7 +21,7 @@ import org.apache.shardingsphere.distsql.handler.exception.storageunit.DuplicateStorageUnitException; import org.apache.shardingsphere.distsql.handler.exception.storageunit.InvalidStorageUnitsException; import org.apache.shardingsphere.distsql.handler.exception.storageunit.MissingRequiredStorageUnitsException; -import org.apache.shardingsphere.distsql.handler.validate.DataSourcePropertiesValidateHandler; +import org.apache.shardingsphere.distsql.handler.validate.DataSourcePoolPropertiesValidateHandler; import org.apache.shardingsphere.distsql.parser.segment.DataSourceSegment; import 
org.apache.shardingsphere.distsql.parser.segment.HostnameAndPortBasedDataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.URLBasedDataSourceSegment; @@ -70,7 +70,7 @@ void setUp() throws ReflectiveOperationException { when(connectionSession.getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "FIXTURE")); handler = new AlterStorageUnitBackendHandler(mock(AlterStorageUnitStatement.class), connectionSession); Plugins.getMemberAccessor().set( - handler.getClass().getDeclaredField("validateHandler"), handler, mock(DataSourcePropertiesValidateHandler.class)); + handler.getClass().getDeclaredField("validateHandler"), handler, mock(DataSourcePoolPropertiesValidateHandler.class)); } @Test diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/RegisterStorageUnitBackendHandlerTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/RegisterStorageUnitBackendHandlerTest.java index 1f13fc4474c15..d4e12d5c851b1 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/RegisterStorageUnitBackendHandlerTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/RegisterStorageUnitBackendHandlerTest.java @@ -19,7 +19,7 @@ import org.apache.shardingsphere.distsql.handler.exception.storageunit.DuplicateStorageUnitException; import org.apache.shardingsphere.distsql.handler.exception.storageunit.InvalidStorageUnitsException; -import org.apache.shardingsphere.distsql.handler.validate.DataSourcePropertiesValidateHandler; +import org.apache.shardingsphere.distsql.handler.validate.DataSourcePoolPropertiesValidateHandler; import org.apache.shardingsphere.distsql.parser.segment.DataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.HostnameAndPortBasedDataSourceSegment; import 
org.apache.shardingsphere.distsql.parser.segment.URLBasedDataSourceSegment; @@ -73,7 +73,7 @@ void setUp() throws ReflectiveOperationException { when(connectionSession.getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "FIXTURE")); when(database.getRuleMetaData()).thenReturn(mock(RuleMetaData.class)); handler = new RegisterStorageUnitBackendHandler(mock(RegisterStorageUnitStatement.class), connectionSession); - Plugins.getMemberAccessor().set(handler.getClass().getDeclaredField("validateHandler"), handler, mock(DataSourcePropertiesValidateHandler.class)); + Plugins.getMemberAccessor().set(handler.getClass().getDeclaredField("validateHandler"), handler, mock(DataSourcePoolPropertiesValidateHandler.class)); } @Test diff --git a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/config/YamlHBaseConfiguration.java b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/config/YamlHBaseConfiguration.java index 7e5c43146a3e4..fce5a0912e65c 100644 --- a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/config/YamlHBaseConfiguration.java +++ b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/config/YamlHBaseConfiguration.java @@ -33,7 +33,7 @@ public final class YamlHBaseConfiguration implements YamlConfiguration { private String databaseName; - private Map commonDataSourceProps; + private Map commonDataSourcePoolProps; private Map dataSources = new HashMap<>(); diff --git a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/api/impl/MigrationJobAPITest.java b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/api/impl/MigrationJobAPITest.java index d6ccc3fbd5517..838cc63344b0d 100644 --- a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/api/impl/MigrationJobAPITest.java 
+++ b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/api/impl/MigrationJobAPITest.java @@ -47,7 +47,7 @@ import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeFactory; import org.apache.shardingsphere.infra.datanode.DataNode; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.pool.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.DataSourcePoolProperties; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.migration.distsql.statement.MigrateTableStatement; @@ -105,7 +105,7 @@ static void beforeClass() { props.put("jdbcUrl", jdbcUrl); props.put("username", "root"); props.put("password", "root"); - jobAPI.addMigrationSourceResources(PipelineContextUtils.getContextKey(), Collections.singletonMap("ds_0", new DataSourceProperties("com.zaxxer.hikari.HikariDataSource", props))); + jobAPI.addMigrationSourceResources(PipelineContextUtils.getContextKey(), Collections.singletonMap("ds_0", new DataSourcePoolProperties("com.zaxxer.hikari.HikariDataSource", props))); } @AfterAll @@ -279,7 +279,7 @@ void assertRenewJobStatus() { @Test void assertAddMigrationSourceResources() { PipelineDataSourcePersistService persistService = new PipelineDataSourcePersistService(); - Map actual = persistService.load(PipelineContextUtils.getContextKey(), new MigrationJobType()); + Map actual = persistService.load(PipelineContextUtils.getContextKey(), new MigrationJobType()); assertTrue(actual.containsKey("ds_0")); } @@ -315,10 +315,10 @@ void assertCreateJobConfig() throws SQLException { } private void initIntPrimaryEnvironment() throws SQLException { - Map metaDataDataSource = new PipelineDataSourcePersistService().load(PipelineContextUtils.getContextKey(), new MigrationJobType()); - 
DataSourceProperties dataSourceProps = metaDataDataSource.get("ds_0"); + Map metaDataDataSource = new PipelineDataSourcePersistService().load(PipelineContextUtils.getContextKey(), new MigrationJobType()); + DataSourcePoolProperties props = metaDataDataSource.get("ds_0"); try ( - PipelineDataSourceWrapper dataSource = new PipelineDataSourceWrapper(DataSourcePoolCreator.create(dataSourceProps), databaseType); + PipelineDataSourceWrapper dataSource = new PipelineDataSourceWrapper(DataSourcePoolCreator.create(props), databaseType); Connection connection = dataSource.getConnection(); Statement statement = connection.createStatement()) { statement.execute("DROP TABLE IF EXISTS t_order");