
Commit

Refactor StorageNodeName
terrymanu committed Sep 27, 2023
1 parent 5d2ca78 commit f07257f
Showing 21 changed files with 120 additions and 120 deletions.
@@ -26,7 +26,7 @@
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
import org.apache.shardingsphere.infra.metadata.database.resource.StorageResource;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnitNodeMapperUtils;
-import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeIdentifier;
+import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnitNodeMapper;

import javax.sql.DataSource;
@@ -56,8 +56,8 @@ public DataSourceGeneratedDatabaseConfiguration(final Map<String, DataSourceConf
storageResource = new StorageResource(getStorageNodeDataSourceMap(mappers), mappers);
}

-    private Map<StorageNodeIdentifier, DataSource> getStorageNodeDataSourceMap(final Map<String, StorageUnitNodeMapper> mappers) {
-        Map<StorageNodeIdentifier, DataSource> result = new LinkedHashMap<>(mappers.size(), 1F);
+    private Map<StorageNodeName, DataSource> getStorageNodeDataSourceMap(final Map<String, StorageUnitNodeMapper> mappers) {
+        Map<StorageNodeName, DataSource> result = new LinkedHashMap<>(mappers.size(), 1F);
for (Entry<String, StorageUnitNodeMapper> entry : mappers.entrySet()) {
result.computeIfAbsent(entry.getValue().getStorageNode().getName(),
key -> DataSourcePoolCreator.create(entry.getKey(), dataSourcePoolPropertiesMap.get(entry.getKey()), true, result.values()));
@@ -22,7 +22,7 @@
import org.apache.shardingsphere.infra.database.core.type.DatabaseType;
import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator;
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
-import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeIdentifier;
+import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeUtils;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnit;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnitMetaData;
@@ -43,7 +43,7 @@
@Getter
public final class ResourceMetaData {

-    private final Map<StorageNodeIdentifier, DataSource> dataSourceMap;
+    private final Map<StorageNodeName, DataSource> dataSourceMap;

private final StorageUnitMetaData storageUnitMetaData;

@@ -54,7 +54,7 @@ public ResourceMetaData(final Map<String, DataSource> dataSources) {
StorageUnitNodeMapperUtils.fromDataSources(dataSources));
}

-    public ResourceMetaData(final String databaseName, final Map<StorageNodeIdentifier, DataSource> dataSourceMap,
+    public ResourceMetaData(final String databaseName, final Map<StorageNodeName, DataSource> dataSourceMap,
final Map<String, StorageUnitNodeMapper> storageUnitNodeMappers, final Map<String, DataSourcePoolProperties> propsMap) {
this.dataSourceMap = dataSourceMap;
storageUnitMetaData = new StorageUnitMetaData(databaseName, dataSourceMap, propsMap, storageUnitNodeMappers);
@@ -20,7 +20,7 @@
import lombok.Getter;
import org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
-import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeIdentifier;
+import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnitNodeMapper;

import javax.sql.DataSource;
@@ -34,13 +34,13 @@
@Getter
public final class StorageResource {

-    private final Map<StorageNodeIdentifier, DataSource> dataSourceMap;
+    private final Map<StorageNodeName, DataSource> dataSourceMap;

private final Map<String, StorageUnitNodeMapper> storageUnitNodeMappers;

private final Map<String, DataSource> wrappedDataSources;

-    public StorageResource(final Map<StorageNodeIdentifier, DataSource> dataSourceMap, final Map<String, StorageUnitNodeMapper> storageUnitNodeMappers) {
+    public StorageResource(final Map<StorageNodeName, DataSource> dataSourceMap, final Map<String, StorageUnitNodeMapper> storageUnitNodeMappers) {
this.dataSourceMap = dataSourceMap;
this.storageUnitNodeMappers = storageUnitNodeMappers;
wrappedDataSources = createWrappedDataSources();
@@ -25,7 +25,7 @@
@Getter
public final class StorageNode {

-    private final StorageNodeIdentifier name;
+    private final StorageNodeName name;

private final String url;

@@ -36,7 +36,7 @@ public StorageNode(final String name, final String url) {
}

public StorageNode(final String name, final String url, final String catalog) {
-        this.name = new StorageNodeIdentifier(name);
+        this.name = new StorageNodeName(name);
this.url = url;
this.catalog = catalog;
}
@@ -22,17 +22,17 @@
import lombok.RequiredArgsConstructor;

/**
- * Storage node identifier.
+ * Storage node name.
*/
@RequiredArgsConstructor
@Getter
-public final class StorageNodeIdentifier {
+public final class StorageNodeName {

private final String name;

@Override
public boolean equals(final Object obj) {
-        return obj instanceof StorageNodeIdentifier && ((StorageNodeIdentifier) obj).name.equalsIgnoreCase(name);
+        return obj instanceof StorageNodeName && ((StorageNodeName) obj).name.equalsIgnoreCase(name);
}

@Override
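Note: the renamed key class above compares names case-insensitively in equals(); its hashCode() lies outside the visible hunk. Below is a minimal standalone sketch of such a key, written without Lombok for clarity. The lowercase-based hashCode is only an assumption made to stay consistent with the case-insensitive equals shown in the diff, not necessarily the committed implementation.

    // Illustrative sketch only, not the committed class.
    import java.util.Locale;

    public final class StorageNodeName {

        private final String name;

        public StorageNodeName(final String name) {
            this.name = name;
        }

        public String getName() {
            return name;
        }

        @Override
        public boolean equals(final Object obj) {
            return obj instanceof StorageNodeName && ((StorageNodeName) obj).name.equalsIgnoreCase(name);
        }

        @Override
        public int hashCode() {
            // Assumed: hash the case-normalized name so names equal under equalsIgnoreCase share a hash code.
            return name.toLowerCase(Locale.ROOT).hashCode();
        }
    }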
@@ -38,8 +38,8 @@ public final class StorageNodeUtils {
* @param dataSources data sources
* @return storage node data sources
*/
-    public static Map<StorageNodeIdentifier, DataSource> getStorageNodeDataSources(final Map<String, DataSource> dataSources) {
+    public static Map<StorageNodeName, DataSource> getStorageNodeDataSources(final Map<String, DataSource> dataSources) {
return dataSources.entrySet().stream().collect(
-                Collectors.toMap(entry -> new StorageNodeIdentifier(entry.getKey()), Entry::getValue, (oldValue, currentValue) -> currentValue, () -> new LinkedHashMap<>(dataSources.size(), 1F)));
+                Collectors.toMap(entry -> new StorageNodeName(entry.getKey()), Entry::getValue, (oldValue, currentValue) -> currentValue, () -> new LinkedHashMap<>(dataSources.size(), 1F)));
}
}
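Note: because StorageNodeName equality ignores case, data sources whose names differ only in case resolve to the same node key, and the merge function (oldValue, currentValue) -> currentValue keeps the one encountered last. A hedged usage sketch follows; the data source names are made up, and the MockedDataSource test fixture (used in the tests further down) is assumed to have a no-argument constructor.

    // Illustrative usage only; names and the expected outcome comment are assumptions.
    import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;
    import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeUtils;
    import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource;

    import javax.sql.DataSource;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public final class StorageNodeUtilsUsageSketch {

        public static void main(final String[] args) {
            Map<String, DataSource> dataSources = new LinkedHashMap<>();
            dataSources.put("foo_ds", new MockedDataSource());
            dataSources.put("FOO_DS", new MockedDataSource());
            Map<StorageNodeName, DataSource> storageNodeDataSources = StorageNodeUtils.getStorageNodeDataSources(dataSources);
            // The two keys are equal under the case-insensitive equals(), so the map holds a
            // single entry, and the merge function keeps the data source registered last.
            System.out.println(storageNodeDataSources.size());
        }
    }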
@@ -27,7 +27,7 @@
import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator;
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
-import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeIdentifier;
+import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;
import org.apache.shardingsphere.infra.state.datasource.DataSourceStateManager;

import javax.sql.DataSource;
@@ -54,39 +54,39 @@ public final class StorageUnit {

private final ConnectionProperties connectionProperties;

-    public StorageUnit(final String databaseName, final Map<StorageNodeIdentifier, DataSource> storageNodeDataSources,
+    public StorageUnit(final String databaseName, final Map<StorageNodeName, DataSource> storageNodeDataSources,
final DataSourcePoolProperties props, final StorageUnitNodeMapper unitNodeMapper) {
this.dataSourcePoolProperties = props;
this.unitNodeMapper = unitNodeMapper;
dataSource = getStorageUnitDataSource(storageNodeDataSources, unitNodeMapper);
-        Map<StorageNodeIdentifier, DataSource> enabledStorageNodeDataSources = getEnabledStorageNodeDataSources(databaseName, storageNodeDataSources);
+        Map<StorageNodeName, DataSource> enabledStorageNodeDataSources = getEnabledStorageNodeDataSources(databaseName, storageNodeDataSources);
storageType = createStorageType(enabledStorageNodeDataSources, unitNodeMapper);
connectionProperties = createConnectionProperties(enabledStorageNodeDataSources, unitNodeMapper, storageType).orElse(null);
}

-    private DataSource getStorageUnitDataSource(final Map<StorageNodeIdentifier, DataSource> storageNodeDataSources, final StorageUnitNodeMapper mapper) {
+    private DataSource getStorageUnitDataSource(final Map<StorageNodeName, DataSource> storageNodeDataSources, final StorageUnitNodeMapper mapper) {
StorageNode storageNode = mapper.getStorageNode();
DataSource dataSource = storageNodeDataSources.get(storageNode.getName());
return new CatalogSwitchableDataSource(dataSource, storageNode.getCatalog(), storageNode.getUrl());
}

-    private Map<StorageNodeIdentifier, DataSource> getEnabledStorageNodeDataSources(final String databaseName, final Map<StorageNodeIdentifier, DataSource> storageNodeDataSources) {
+    private Map<StorageNodeName, DataSource> getEnabledStorageNodeDataSources(final String databaseName, final Map<StorageNodeName, DataSource> storageNodeDataSources) {
Map<String, DataSource> toBeCheckedDataSources = new LinkedHashMap<>(storageNodeDataSources.size(), 1F);
-        for (Entry<StorageNodeIdentifier, DataSource> entry : storageNodeDataSources.entrySet()) {
+        for (Entry<StorageNodeName, DataSource> entry : storageNodeDataSources.entrySet()) {
toBeCheckedDataSources.put(entry.getKey().getName(), entry.getValue());
}
Map<String, DataSource> enabledDataSources = DataSourceStateManager.getInstance().getEnabledDataSources(databaseName, toBeCheckedDataSources);
return storageNodeDataSources.entrySet().stream()
.filter(entry -> enabledDataSources.containsKey(entry.getKey().getName())).collect(Collectors.toMap(Entry::getKey, Entry::getValue));
}

-    private DatabaseType createStorageType(final Map<StorageNodeIdentifier, DataSource> enabledStorageNodeDataSources, final StorageUnitNodeMapper unitNodeMapper) {
+    private DatabaseType createStorageType(final Map<StorageNodeName, DataSource> enabledStorageNodeDataSources, final StorageUnitNodeMapper unitNodeMapper) {
return DatabaseTypeEngine.getStorageType(enabledStorageNodeDataSources.containsKey(unitNodeMapper.getStorageNode().getName())
? Collections.singleton(enabledStorageNodeDataSources.get(unitNodeMapper.getStorageNode().getName()))
: Collections.emptyList());
}

-    private Optional<ConnectionProperties> createConnectionProperties(final Map<StorageNodeIdentifier, DataSource> enabledStorageNodeDataSources,
+    private Optional<ConnectionProperties> createConnectionProperties(final Map<StorageNodeName, DataSource> enabledStorageNodeDataSources,
final StorageUnitNodeMapper mapper, final DatabaseType storageType) {
StorageNode storageNode = mapper.getStorageNode();
if (!enabledStorageNodeDataSources.containsKey(storageNode.getName())) {
@@ -19,7 +19,7 @@

import lombok.Getter;
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
-import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeIdentifier;
+import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;

import javax.sql.DataSource;
import java.util.LinkedHashMap;
@@ -41,7 +41,7 @@ public final class StorageUnitMetaData {
// TODO zhangliang: should refactor
private final Map<String, DataSource> dataSources;

-    public StorageUnitMetaData(final String databaseName, final Map<StorageNodeIdentifier, DataSource> storageNodeDataSources,
+    public StorageUnitMetaData(final String databaseName, final Map<StorageNodeName, DataSource> storageNodeDataSources,
final Map<String, DataSourcePoolProperties> dataSourcePoolPropertiesMap, final Map<String, StorageUnitNodeMapper> unitNodeMappers) {
this.unitNodeMappers = unitNodeMappers;
storageUnits = new LinkedHashMap<>(unitNodeMappers.size(), 1F);
@@ -27,7 +27,7 @@
import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator;
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
-import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeIdentifier;
+import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;

import javax.sql.DataSource;
import java.util.LinkedHashMap;
@@ -106,14 +106,14 @@ private static StorageUnitNodeMapper createStorageUnitNodeMapper(final String st
* @param storageUnitDataSourcePoolProps storage unit grouped data source pool properties map
* @return storage node grouped data source pool properties map
*/
-    public static Map<StorageNodeIdentifier, DataSourcePoolProperties> getStorageNodeDataSourcePoolProperties(final Map<String, DataSourcePoolProperties> storageUnitDataSourcePoolProps) {
-        Map<StorageNodeIdentifier, DataSourcePoolProperties> result = new LinkedHashMap<>();
+    public static Map<StorageNodeName, DataSourcePoolProperties> getStorageNodeDataSourcePoolProperties(final Map<String, DataSourcePoolProperties> storageUnitDataSourcePoolProps) {
+        Map<StorageNodeName, DataSourcePoolProperties> result = new LinkedHashMap<>();
for (Entry<String, DataSourcePoolProperties> entry : storageUnitDataSourcePoolProps.entrySet()) {
Map<String, Object> standardProps = entry.getValue().getConnectionPropertySynonyms().getStandardProperties();
String url = standardProps.get("url").toString();
boolean isInstanceConnectionAvailable = new DatabaseTypeRegistry(DatabaseTypeFactory.get(url)).getDialectDatabaseMetaData().isInstanceConnectionAvailable();
-            StorageNodeIdentifier storageNodeIdentifier = new StorageNodeIdentifier(getStorageNodeName(entry.getKey(), url, standardProps.get("username").toString(), isInstanceConnectionAvailable));
-            result.putIfAbsent(storageNodeIdentifier, entry.getValue());
+            StorageNodeName storageNodeName = new StorageNodeName(getStorageNodeName(entry.getKey(), url, standardProps.get("username").toString(), isInstanceConnectionAvailable));
+            result.putIfAbsent(storageNodeName, entry.getValue());
}
return result;
}
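Note: in the hunk above, when the dialect reports isInstanceConnectionAvailable, several storage units that point at the same database instance collapse to one StorageNodeName, and putIfAbsent keeps the first pool properties registered for that node. Below is a rough, self-contained sketch of that grouping idea. The host:port-based key derivation stands in for the private getStorageNodeName(...) helper, whose real logic is not shown in this diff, and is only an assumption for illustration.

    // Illustrative sketch, not the committed implementation.
    import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
    import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;

    import java.net.URI;
    import java.util.LinkedHashMap;
    import java.util.Map;
    import java.util.Map.Entry;

    public final class StorageNodeGroupingSketch {

        public static Map<StorageNodeName, DataSourcePoolProperties> groupByNode(final Map<String, DataSourcePoolProperties> storageUnitDataSourcePoolProps) {
            Map<StorageNodeName, DataSourcePoolProperties> result = new LinkedHashMap<>();
            for (Entry<String, DataSourcePoolProperties> entry : storageUnitDataSourcePoolProps.entrySet()) {
                String url = entry.getValue().getConnectionPropertySynonyms().getStandardProperties().get("url").toString();
                // Assumed node key: the host:port part of the JDBC URL, so units sharing an
                // instance share one node; the real helper also considers username and dialect.
                String nodeKey = URI.create(url.substring("jdbc:".length())).getAuthority();
                // First registration wins per node, mirroring the putIfAbsent in the hunk above.
                result.putIfAbsent(new StorageNodeName(nodeKey), entry.getValue());
            }
            return result;
        }
    }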
@@ -23,7 +23,7 @@
import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration;
import org.apache.shardingsphere.infra.datasource.pool.config.PoolConfiguration;
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
-import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeIdentifier;
+import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;
import org.apache.shardingsphere.infra.fixture.FixtureRuleConfiguration;
import org.junit.jupiter.api.Test;

@@ -50,7 +50,7 @@ void assertGetDataSources() {
@Test
void assertGetStorageNodes() {
DataSourceGeneratedDatabaseConfiguration databaseConfig = createDataSourceGeneratedDatabaseConfiguration();
-        HikariDataSource hikariDataSource = (HikariDataSource) databaseConfig.getStorageResource().getDataSourceMap().get(new StorageNodeIdentifier("normal_db"));
+        HikariDataSource hikariDataSource = (HikariDataSource) databaseConfig.getStorageResource().getDataSourceMap().get(new StorageNodeName("normal_db"));
assertThat(hikariDataSource.getJdbcUrl(), is("jdbc:mock://127.0.0.1/normal_db"));
assertThat(hikariDataSource.getUsername(), is("root"));
assertThat(hikariDataSource.getPassword(), is(""));
@@ -19,7 +19,7 @@

import org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource;
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
-import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeIdentifier;
+import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;
import org.apache.shardingsphere.infra.fixture.FixtureRuleConfiguration;
import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource;
import org.junit.jupiter.api.Test;
@@ -45,7 +45,7 @@ void assertGetDataSources() {
@Test
void assertGetStorageNodes() {
DataSourceProvidedDatabaseConfiguration databaseConfig = createDataSourceProvidedDatabaseConfiguration();
-        MockedDataSource dataSource = (MockedDataSource) databaseConfig.getStorageResource().getDataSourceMap().get(new StorageNodeIdentifier("foo_ds"));
+        MockedDataSource dataSource = (MockedDataSource) databaseConfig.getStorageResource().getDataSourceMap().get(new StorageNodeName("foo_ds"));
assertThat(dataSource.getUrl(), is("jdbc:mock://127.0.0.1/foo_ds"));
assertThat(dataSource.getUsername(), is("root"));
assertThat(dataSource.getPassword(), is("root"));
@@ -22,7 +22,7 @@
import org.apache.shardingsphere.driver.jdbc.core.connection.ShardingSphereConnection;
import org.apache.shardingsphere.infra.config.rule.RuleConfiguration;
import org.apache.shardingsphere.infra.database.core.DefaultDatabase;
-import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeIdentifier;
+import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;
import org.apache.shardingsphere.infra.executor.sql.execute.engine.ConnectionMode;
import org.apache.shardingsphere.infra.state.cluster.ClusterState;
import org.apache.shardingsphere.infra.state.instance.InstanceState;
@@ -132,9 +132,9 @@ void assertClose() throws Exception {
try (HikariDataSource dataSource = createHikariDataSource()) {
ShardingSphereDataSource actual = createShardingSphereDataSource(dataSource);
actual.close();
-            Map<StorageNodeIdentifier, DataSource> dataSourceMap = getContextManager(actual).getMetaDataContexts().getMetaData()
+            Map<StorageNodeName, DataSource> dataSourceMap = getContextManager(actual).getMetaDataContexts().getMetaData()
.getDatabase(DefaultDatabase.LOGIC_NAME).getResourceMetaData().getDataSourceMap();
-            assertTrue(((HikariDataSource) dataSourceMap.get(new StorageNodeIdentifier("ds"))).isClosed());
+            assertTrue(((HikariDataSource) dataSourceMap.get(new StorageNodeName("ds"))).isClosed());
}
}

[Diffs for the remaining changed files were not loaded on this page.]

0 comments on commit f07257f
