Rename StorageNodeIdentifier
terrymanu committed Sep 27, 2023
1 parent ff79177 commit 125e003
Showing 21 changed files with 140 additions and 133 deletions.
@@ -26,7 +26,7 @@
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
import org.apache.shardingsphere.infra.metadata.database.resource.StorageResource;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnitNodeMapperUtils;
- import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
+ import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeIdentifier;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnitNodeMapper;

import javax.sql.DataSource;
@@ -56,10 +56,11 @@ public DataSourceGeneratedDatabaseConfiguration(final Map<String, DataSourceConf
storageResource = new StorageResource(getStorageNodeDataSourceMap(mappers), mappers);
}

- private Map<StorageNode, DataSource> getStorageNodeDataSourceMap(final Map<String, StorageUnitNodeMapper> mappers) {
- Map<StorageNode, DataSource> result = new LinkedHashMap<>(mappers.size(), 1F);
+ private Map<StorageNodeIdentifier, DataSource> getStorageNodeDataSourceMap(final Map<String, StorageUnitNodeMapper> mappers) {
+ Map<StorageNodeIdentifier, DataSource> result = new LinkedHashMap<>(mappers.size(), 1F);
for (Entry<String, StorageUnitNodeMapper> entry : mappers.entrySet()) {
- result.computeIfAbsent(entry.getValue().getStorageNode(), key -> DataSourcePoolCreator.create(entry.getKey(), dataSourcePoolPropertiesMap.get(entry.getKey()), true, result.values()));
+ result.computeIfAbsent(entry.getValue().getStorageNodeIdentifier(),
+ key -> DataSourcePoolCreator.create(entry.getKey(), dataSourcePoolPropertiesMap.get(entry.getKey()), true, result.values()));
}
return result;
}
@@ -22,7 +22,7 @@
import org.apache.shardingsphere.infra.database.core.type.DatabaseType;
import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator;
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
- import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
+ import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeIdentifier;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeUtils;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnit;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnitMetaData;
@@ -43,7 +43,7 @@
@Getter
public final class ResourceMetaData {

- private final Map<StorageNode, DataSource> dataSourceMap;
+ private final Map<StorageNodeIdentifier, DataSource> dataSourceMap;

private final StorageUnitMetaData storageUnitMetaData;

@@ -54,7 +54,7 @@ public ResourceMetaData(final Map<String, DataSource> dataSources) {
StorageUnitNodeMapperUtils.fromDataSources(dataSources));
}

- public ResourceMetaData(final String databaseName, final Map<StorageNode, DataSource> dataSourceMap,
+ public ResourceMetaData(final String databaseName, final Map<StorageNodeIdentifier, DataSource> dataSourceMap,
final Map<String, StorageUnitNodeMapper> storageUnitNodeMappers, final Map<String, DataSourcePoolProperties> propsMap) {
this.dataSourceMap = dataSourceMap;
storageUnitMetaData = new StorageUnitMetaData(databaseName, dataSourceMap, propsMap, storageUnitNodeMappers);
@@ -19,7 +19,7 @@

import lombok.Getter;
import org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource;
- import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
+ import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeIdentifier;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnitNodeMapper;

import javax.sql.DataSource;
@@ -33,13 +33,13 @@
@Getter
public final class StorageResource {

- private final Map<StorageNode, DataSource> dataSourceMap;
+ private final Map<StorageNodeIdentifier, DataSource> dataSourceMap;

private final Map<String, StorageUnitNodeMapper> storageUnitNodeMappers;

private final Map<String, DataSource> wrappedDataSources;

- public StorageResource(final Map<StorageNode, DataSource> dataSourceMap, final Map<String, StorageUnitNodeMapper> storageUnitNodeMappers) {
+ public StorageResource(final Map<StorageNodeIdentifier, DataSource> dataSourceMap, final Map<String, StorageUnitNodeMapper> storageUnitNodeMappers) {
this.dataSourceMap = dataSourceMap;
this.storageUnitNodeMappers = storageUnitNodeMappers;
wrappedDataSources = createWrappedDataSources();
@@ -48,7 +48,7 @@ public StorageResource(final Map<StorageNode, DataSource> dataSourceMap, final M
private Map<String, DataSource> createWrappedDataSources() {
Map<String, DataSource> result = new LinkedHashMap<>(storageUnitNodeMappers.size(), 1F);
for (Entry<String, StorageUnitNodeMapper> entry : storageUnitNodeMappers.entrySet()) {
- DataSource dataSource = dataSourceMap.get(entry.getValue().getStorageNode());
+ DataSource dataSource = dataSourceMap.get(entry.getValue().getStorageNodeIdentifier());
if (null != dataSource) {
result.put(entry.getKey(), new CatalogSwitchableDataSource(dataSource, entry.getValue().getCatalog(), entry.getValue().getUrl()));
}
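Several storage units can map onto one physical storage node; the wrapping above re-exposes the shared node data source once per unit, switched to that unit's catalog. A minimal sketch using the constructors shown in this diff (names, URLs, and the physicalDataSource argument are hypothetical, not part of the commit):

    // Sketch only: two storage units backed by the same storage node.
    static Map<String, DataSource> wrapSharedNode(final DataSource physicalDataSource) {
        StorageNodeIdentifier node = new StorageNodeIdentifier("127.0.0.1_3306_root");
        Map<StorageNodeIdentifier, DataSource> nodeDataSources = new LinkedHashMap<>();
        nodeDataSources.put(node, physicalDataSource);
        Map<String, StorageUnitNodeMapper> mappers = new LinkedHashMap<>();
        mappers.put("ds_0", new StorageUnitNodeMapper("ds_0", node, "jdbc:mysql://127.0.0.1:3306/db0", "db0"));
        mappers.put("ds_1", new StorageUnitNodeMapper("ds_1", node, "jdbc:mysql://127.0.0.1:3306/db1", "db1"));
        // Both "ds_0" and "ds_1" wrap the same underlying data source via CatalogSwitchableDataSource.
        return new StorageResource(nodeDataSources, mappers).getWrappedDataSources();
    }
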
@@ -22,17 +22,17 @@
import lombok.RequiredArgsConstructor;

/**
- * Storage node.
+ * Storage node identifier.
*/
@RequiredArgsConstructor
@Getter
- public final class StorageNode {
+ public final class StorageNodeIdentifier {

private final String name;

@Override
public boolean equals(final Object obj) {
- return obj instanceof StorageNode && ((StorageNode) obj).name.equalsIgnoreCase(name);
+ return obj instanceof StorageNodeIdentifier && ((StorageNodeIdentifier) obj).name.equalsIgnoreCase(name);
}

@Override
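The renamed class keeps the case-insensitive equality shown above. A minimal usage sketch (not part of the commit; the hashCode override is outside the shown hunk and is assumed to stay consistent with equals):

    // Sketch only: node identity compares names case-insensitively.
    StorageNodeIdentifier lower = new StorageNodeIdentifier("ds_0");
    StorageNodeIdentifier upper = new StorageNodeIdentifier("DS_0");
    assert lower.equals(upper);            // equalsIgnoreCase on name
    assert "ds_0".equals(lower.getName()); // Lombok @Getter exposes the name
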
@@ -38,8 +38,8 @@ public final class StorageNodeUtils {
* @param dataSources data sources
* @return storage node data sources
*/
- public static Map<StorageNode, DataSource> getStorageNodeDataSources(final Map<String, DataSource> dataSources) {
- return dataSources.entrySet().stream()
- .collect(Collectors.toMap(entry -> new StorageNode(entry.getKey()), Entry::getValue, (oldValue, currentValue) -> currentValue, () -> new LinkedHashMap<>(dataSources.size(), 1F)));
+ public static Map<StorageNodeIdentifier, DataSource> getStorageNodeDataSources(final Map<String, DataSource> dataSources) {
+ return dataSources.entrySet().stream().collect(
+ Collectors.toMap(entry -> new StorageNodeIdentifier(entry.getKey()), Entry::getValue, (oldValue, currentValue) -> currentValue, () -> new LinkedHashMap<>(dataSources.size(), 1F)));
}
}
@@ -26,7 +26,7 @@
import org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource;
import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator;
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
- import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
+ import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeIdentifier;
import org.apache.shardingsphere.infra.state.datasource.DataSourceStateManager;

import javax.sql.DataSource;
@@ -53,44 +53,44 @@ public final class StorageUnit {

private final ConnectionProperties connectionProperties;

- public StorageUnit(final String databaseName, final Map<StorageNode, DataSource> storageNodeDataSources,
+ public StorageUnit(final String databaseName, final Map<StorageNodeIdentifier, DataSource> storageNodeDataSources,
final DataSourcePoolProperties props, final StorageUnitNodeMapper unitNodeMapper) {
this.dataSourcePoolProperties = props;
this.unitNodeMapper = unitNodeMapper;
dataSource = getStorageUnitDataSource(storageNodeDataSources, unitNodeMapper);
- Map<StorageNode, DataSource> enabledStorageNodeDataSources = getEnabledStorageNodeDataSources(databaseName, storageNodeDataSources);
+ Map<StorageNodeIdentifier, DataSource> enabledStorageNodeDataSources = getEnabledStorageNodeDataSources(databaseName, storageNodeDataSources);
storageType = createStorageType(enabledStorageNodeDataSources, unitNodeMapper);
connectionProperties = createConnectionProperties(enabledStorageNodeDataSources, unitNodeMapper, storageType).orElse(null);
}

- private DataSource getStorageUnitDataSource(final Map<StorageNode, DataSource> storageNodeDataSources, final StorageUnitNodeMapper unitNodeMapper) {
- DataSource dataSource = storageNodeDataSources.get(unitNodeMapper.getStorageNode());
+ private DataSource getStorageUnitDataSource(final Map<StorageNodeIdentifier, DataSource> storageNodeDataSources, final StorageUnitNodeMapper unitNodeMapper) {
+ DataSource dataSource = storageNodeDataSources.get(unitNodeMapper.getStorageNodeIdentifier());
return new CatalogSwitchableDataSource(dataSource, unitNodeMapper.getCatalog(), unitNodeMapper.getUrl());
}

- private Map<StorageNode, DataSource> getEnabledStorageNodeDataSources(final String databaseName, final Map<StorageNode, DataSource> storageNodeDataSources) {
+ private Map<StorageNodeIdentifier, DataSource> getEnabledStorageNodeDataSources(final String databaseName, final Map<StorageNodeIdentifier, DataSource> storageNodeDataSources) {
Map<String, DataSource> toBeCheckedDataSources = new LinkedHashMap<>(storageNodeDataSources.size(), 1F);
- for (Entry<StorageNode, DataSource> entry : storageNodeDataSources.entrySet()) {
+ for (Entry<StorageNodeIdentifier, DataSource> entry : storageNodeDataSources.entrySet()) {
toBeCheckedDataSources.put(entry.getKey().getName(), entry.getValue());
}
Map<String, DataSource> enabledDataSources = DataSourceStateManager.getInstance().getEnabledDataSources(databaseName, toBeCheckedDataSources);
return storageNodeDataSources.entrySet().stream()
.filter(entry -> enabledDataSources.containsKey(entry.getKey().getName())).collect(Collectors.toMap(Entry::getKey, Entry::getValue));
}

- private DatabaseType createStorageType(final Map<StorageNode, DataSource> enabledStorageNodeDataSources, final StorageUnitNodeMapper unitNodeMapper) {
- return DatabaseTypeEngine.getStorageType(enabledStorageNodeDataSources.containsKey(unitNodeMapper.getStorageNode())
- ? Collections.singleton(enabledStorageNodeDataSources.get(unitNodeMapper.getStorageNode()))
+ private DatabaseType createStorageType(final Map<StorageNodeIdentifier, DataSource> enabledStorageNodeDataSources, final StorageUnitNodeMapper unitNodeMapper) {
+ return DatabaseTypeEngine.getStorageType(enabledStorageNodeDataSources.containsKey(unitNodeMapper.getStorageNodeIdentifier())
+ ? Collections.singleton(enabledStorageNodeDataSources.get(unitNodeMapper.getStorageNodeIdentifier()))
: Collections.emptyList());
}

- private Optional<ConnectionProperties> createConnectionProperties(final Map<StorageNode, DataSource> enabledStorageNodeDataSources,
+ private Optional<ConnectionProperties> createConnectionProperties(final Map<StorageNodeIdentifier, DataSource> enabledStorageNodeDataSources,
final StorageUnitNodeMapper unitNodeMapper, final DatabaseType storageType) {
- if (!enabledStorageNodeDataSources.containsKey(unitNodeMapper.getStorageNode())) {
+ if (!enabledStorageNodeDataSources.containsKey(unitNodeMapper.getStorageNodeIdentifier())) {
return Optional.empty();
}
Map<String, Object> standardProps = DataSourcePoolPropertiesCreator.create(
- enabledStorageNodeDataSources.get(unitNodeMapper.getStorageNode())).getConnectionPropertySynonyms().getStandardProperties();
+ enabledStorageNodeDataSources.get(unitNodeMapper.getStorageNodeIdentifier())).getConnectionPropertySynonyms().getStandardProperties();
ConnectionPropertiesParser parser = DatabaseTypedSPILoader.getService(ConnectionPropertiesParser.class, storageType);
return Optional.of(parser.parse(standardProps.get("url").toString(), standardProps.get("username").toString(), unitNodeMapper.getCatalog()));
}
@@ -19,7 +19,7 @@

import lombok.Getter;
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
- import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
+ import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeIdentifier;

import javax.sql.DataSource;
import java.util.LinkedHashMap;
@@ -41,7 +41,7 @@ public final class StorageUnitMetaData {
// TODO zhangliang: should refactor
private final Map<String, DataSource> dataSources;

- public StorageUnitMetaData(final String databaseName, final Map<StorageNode, DataSource> storageNodeDataSources,
+ public StorageUnitMetaData(final String databaseName, final Map<StorageNodeIdentifier, DataSource> storageNodeDataSources,
final Map<String, DataSourcePoolProperties> dataSourcePoolPropertiesMap, final Map<String, StorageUnitNodeMapper> unitNodeMappers) {
this.unitNodeMappers = unitNodeMappers;
storageUnits = new LinkedHashMap<>(unitNodeMappers.size(), 1F);
@@ -19,7 +19,7 @@

import lombok.Getter;
import lombok.RequiredArgsConstructor;
- import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
+ import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeIdentifier;

/**
* Storage unit and node mapper.
@@ -30,13 +30,13 @@ public final class StorageUnitNodeMapper {

private final String name;

- private final StorageNode storageNode;
+ private final StorageNodeIdentifier storageNodeIdentifier;

private final String url;

private final String catalog;

- public StorageUnitNodeMapper(final String name, final StorageNode storageNode, final String url) {
- this(name, storageNode, url, null);
+ public StorageUnitNodeMapper(final String name, final StorageNodeIdentifier storageNodeIdentifier, final String url) {
+ this(name, storageNodeIdentifier, url, null);
}
}
@@ -26,7 +26,7 @@
import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry;
import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator;
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
- import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
+ import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeIdentifier;

import javax.sql.DataSource;
import java.util.LinkedHashMap;
@@ -54,7 +54,7 @@ public static Map<String, StorageUnitNodeMapper> fromDataSources(final Map<Strin
private static StorageUnitNodeMapper fromDataSource(final String storageUnitName, final DataSource dataSource) {
DataSourcePoolProperties props = DataSourcePoolPropertiesCreator.create(dataSource);
String url = props.getConnectionPropertySynonyms().getStandardProperties().get("url").toString();
- return new StorageUnitNodeMapper(storageUnitName, new StorageNode(storageUnitName), url);
+ return new StorageUnitNodeMapper(storageUnitName, new StorageNodeIdentifier(storageUnitName), url);
}

/**
@@ -76,8 +76,8 @@ private static StorageUnitNodeMapper fromDataSourcePoolProperties(final String s
Map<String, Object> standardProps = props.getConnectionPropertySynonyms().getStandardProperties();
String url = standardProps.get("url").toString();
boolean isInstanceConnectionAvailable = new DatabaseTypeRegistry(DatabaseTypeFactory.get(url)).getDialectDatabaseMetaData().isInstanceConnectionAvailable();
- StorageNode storageNode = new StorageNode(getStorageNodeName(storageUnitName, url, standardProps.get("username").toString(), isInstanceConnectionAvailable));
- return createStorageUnitNodeMapper(storageNode, storageUnitName, url, isInstanceConnectionAvailable);
+ StorageNodeIdentifier storageNodeIdentifier = new StorageNodeIdentifier(getStorageNodeName(storageUnitName, url, standardProps.get("username").toString(), isInstanceConnectionAvailable));
+ return createStorageUnitNodeMapper(storageNodeIdentifier, storageUnitName, url, isInstanceConnectionAvailable);
}

private static String getStorageNodeName(final String dataSourceName, final String url, final String username, final boolean isInstanceConnectionAvailable) {
@@ -93,10 +93,11 @@ private static String generateStorageNodeName(final String hostname, final int p
return String.format("%s_%s_%s", hostname, port, username);
}

- private static StorageUnitNodeMapper createStorageUnitNodeMapper(final StorageNode storageNode, final String storageUnitName, final String url, final boolean isInstanceConnectionAvailable) {
+ private static StorageUnitNodeMapper createStorageUnitNodeMapper(final StorageNodeIdentifier storageNodeIdentifier,
+ final String storageUnitName, final String url, final boolean isInstanceConnectionAvailable) {
return isInstanceConnectionAvailable
- ? new StorageUnitNodeMapper(storageUnitName, storageNode, url, new StandardJdbcUrlParser().parse(url).getDatabase())
- : new StorageUnitNodeMapper(storageUnitName, storageNode, url);
+ ? new StorageUnitNodeMapper(storageUnitName, storageNodeIdentifier, url, new StandardJdbcUrlParser().parse(url).getDatabase())
+ : new StorageUnitNodeMapper(storageUnitName, storageNodeIdentifier, url);
}

/**
@@ -105,14 +106,14 @@ private static StorageUnitNodeMapper createStorageUnitNodeMapper(final StorageNo
* @param storageUnitDataSourcePoolProps storage unit grouped data source pool properties map
* @return storage node grouped data source pool properties map
*/
- public static Map<StorageNode, DataSourcePoolProperties> getStorageNodeDataSourcePoolProperties(final Map<String, DataSourcePoolProperties> storageUnitDataSourcePoolProps) {
- Map<StorageNode, DataSourcePoolProperties> result = new LinkedHashMap<>();
+ public static Map<StorageNodeIdentifier, DataSourcePoolProperties> getStorageNodeDataSourcePoolProperties(final Map<String, DataSourcePoolProperties> storageUnitDataSourcePoolProps) {
+ Map<StorageNodeIdentifier, DataSourcePoolProperties> result = new LinkedHashMap<>();
for (Entry<String, DataSourcePoolProperties> entry : storageUnitDataSourcePoolProps.entrySet()) {
Map<String, Object> standardProps = entry.getValue().getConnectionPropertySynonyms().getStandardProperties();
String url = standardProps.get("url").toString();
boolean isInstanceConnectionAvailable = new DatabaseTypeRegistry(DatabaseTypeFactory.get(url)).getDialectDatabaseMetaData().isInstanceConnectionAvailable();
- StorageNode storageNode = new StorageNode(getStorageNodeName(entry.getKey(), url, standardProps.get("username").toString(), isInstanceConnectionAvailable));
- result.putIfAbsent(storageNode, entry.getValue());
+ StorageNodeIdentifier storageNodeIdentifier = new StorageNodeIdentifier(getStorageNodeName(entry.getKey(), url, standardProps.get("username").toString(), isInstanceConnectionAvailable));
+ result.putIfAbsent(storageNodeIdentifier, entry.getValue());
}
return result;
}
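
Under the naming scheme shown above (hostname_port_username when an instance-level connection is available), storage units that point at the same database instance collapse to a single node entry, and putIfAbsent keeps the first unit's pool properties. A hedged sketch (assuming this hunk belongs to StorageUnitNodeMapperUtils and that the hidden getStorageNodeName helper derives host and port from the JDBC URL; unit names and URLs are hypothetical):

    // Sketch only: ds_0 and ds_1 share host, port, and username, so they group under one node key.
    static void assertSharedInstanceCollapses(final Map<String, DataSourcePoolProperties> unitPoolProps) {
        // e.g. "ds_0" -> jdbc:mysql://127.0.0.1:3306/db0 and "ds_1" -> jdbc:mysql://127.0.0.1:3306/db1,
        // both as user root, are expected to share the node name "127.0.0.1_3306_root".
        Map<StorageNodeIdentifier, DataSourcePoolProperties> grouped =
                StorageUnitNodeMapperUtils.getStorageNodeDataSourcePoolProperties(unitPoolProps);
        assert 1 == grouped.size();
    }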
