Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Remove StorageNodeName #28650

Merged
merged 1 commit into from
Oct 6, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,6 @@
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
import org.apache.shardingsphere.infra.metadata.database.resource.StorageResource;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnit;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnitNodeMapUtils;

Expand Down Expand Up @@ -54,22 +53,22 @@ public DataSourceGeneratedDatabaseConfiguration(final Map<String, DataSourceConf
Map<String, DataSourcePoolProperties> dataSourcePoolPropertiesMap = dataSourceConfigs.entrySet().stream()
.collect(Collectors.toMap(Entry::getKey, entry -> DataSourcePoolPropertiesCreator.create(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new));
Map<String, StorageNode> storageUnitNodeMap = StorageUnitNodeMapUtils.fromDataSourcePoolProperties(dataSourcePoolPropertiesMap);
Map<StorageNodeName, DataSource> storageNodeDataSources = getStorageNodeDataSourceMap(dataSourcePoolPropertiesMap, storageUnitNodeMap);
Map<StorageNode, DataSource> storageNodeDataSources = getStorageNodeDataSourceMap(dataSourcePoolPropertiesMap, storageUnitNodeMap);
storageUnits = new LinkedHashMap<>(dataSourceConfigs.size(), 1F);
for (Entry<String, DataSourceConfiguration> entry : dataSourceConfigs.entrySet()) {
String storageUnitName = entry.getKey();
StorageNode storageNode = storageUnitNodeMap.get(storageUnitName);
DataSource dataSource = storageNodeDataSources.get(storageNode.getName());
DataSource dataSource = storageNodeDataSources.get(storageNode);
StorageUnit storageUnit = new StorageUnit(storageNode, dataSourcePoolPropertiesMap.get(storageUnitName), dataSource);
storageUnits.put(storageUnitName, storageUnit);
}
storageResource = new StorageResource(storageNodeDataSources, storageUnitNodeMap);
}

private Map<StorageNodeName, DataSource> getStorageNodeDataSourceMap(final Map<String, DataSourcePoolProperties> dataSourcePoolPropertiesMap, final Map<String, StorageNode> storageUnitNodeMap) {
Map<StorageNodeName, DataSource> result = new LinkedHashMap<>(storageUnitNodeMap.size(), 1F);
private Map<StorageNode, DataSource> getStorageNodeDataSourceMap(final Map<String, DataSourcePoolProperties> dataSourcePoolPropertiesMap, final Map<String, StorageNode> storageUnitNodeMap) {
Map<StorageNode, DataSource> result = new LinkedHashMap<>(storageUnitNodeMap.size(), 1F);
for (Entry<String, StorageNode> entry : storageUnitNodeMap.entrySet()) {
result.computeIfAbsent(entry.getValue().getName(), key -> DataSourcePoolCreator.create(entry.getKey(), dataSourcePoolPropertiesMap.get(entry.getKey()), true, result.values()));
result.computeIfAbsent(entry.getValue(), key -> DataSourcePoolCreator.create(entry.getKey(), dataSourcePoolPropertiesMap.get(entry.getKey()), true, result.values()));
}
return result;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@
import org.apache.shardingsphere.infra.metadata.database.resource.StorageResource;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeAggregator;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnit;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnitNodeMapUtils;

Expand All @@ -51,13 +50,13 @@ public final class DataSourceProvidedDatabaseConfiguration implements DatabaseCo
public DataSourceProvidedDatabaseConfiguration(final Map<String, DataSource> dataSources, final Collection<RuleConfiguration> ruleConfigs) {
this.ruleConfigurations = ruleConfigs;
Map<String, StorageNode> storageUnitNodeMap = StorageUnitNodeMapUtils.fromDataSources(dataSources);
Map<StorageNodeName, DataSource> storageNodeDataSources = StorageNodeAggregator.aggregateDataSources(dataSources);
Map<StorageNode, DataSource> storageNodeDataSources = StorageNodeAggregator.aggregateDataSources(dataSources);
Map<String, DataSourcePoolProperties> dataSourcePoolPropertiesMap = createDataSourcePoolPropertiesMap(dataSources);
storageUnits = new LinkedHashMap<>(dataSourcePoolPropertiesMap.size(), 1F);
for (Entry<String, DataSourcePoolProperties> entry : dataSourcePoolPropertiesMap.entrySet()) {
String storageUnitName = entry.getKey();
StorageNode storageNode = storageUnitNodeMap.get(storageUnitName);
StorageUnit storageUnit = new StorageUnit(storageNode, dataSourcePoolPropertiesMap.get(storageUnitName), storageNodeDataSources.get(storageNode.getName()));
StorageUnit storageUnit = new StorageUnit(storageNode, dataSourcePoolPropertiesMap.get(storageUnitName), storageNodeDataSources.get(storageNode));
storageUnits.put(storageUnitName, storageUnit);
}
storageResource = new StorageResource(storageNodeDataSources, storageUnitNodeMap);
Expand All @@ -68,12 +67,12 @@ public DataSourceProvidedDatabaseConfiguration(final StorageResource storageReso
this.storageResource = storageResource;
this.ruleConfigurations = ruleConfigs;
Map<String, StorageNode> storageUnitNodeMap = StorageUnitNodeMapUtils.fromDataSourcePoolProperties(dataSourcePoolPropertiesMap);
Map<StorageNodeName, DataSource> storageNodeDataSources = storageResource.getDataSources();
Map<StorageNode, DataSource> storageNodeDataSources = storageResource.getDataSources();
storageUnits = new LinkedHashMap<>(dataSourcePoolPropertiesMap.size(), 1F);
for (Entry<String, DataSourcePoolProperties> entry : dataSourcePoolPropertiesMap.entrySet()) {
String storageUnitName = entry.getKey();
StorageNode storageNode = storageUnitNodeMap.get(storageUnitName);
StorageUnit storageUnit = new StorageUnit(storageNode, dataSourcePoolPropertiesMap.get(storageUnitName), storageNodeDataSources.get(storageNode.getName()));
StorageUnit storageUnit = new StorageUnit(storageNode, dataSourcePoolPropertiesMap.get(storageUnitName), storageNodeDataSources.get(storageNode));
storageUnits.put(storageUnitName, storageUnit);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeAggregator;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnit;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnitNodeMapUtils;

Expand All @@ -42,7 +41,7 @@
@Getter
public final class ResourceMetaData {

private final Map<StorageNodeName, DataSource> dataSources;
private final Map<StorageNode, DataSource> dataSources;

private final Map<String, StorageUnit> storageUnits;

Expand All @@ -53,7 +52,7 @@ public ResourceMetaData(final Map<String, DataSource> dataSources) {
Collectors.toMap(Entry::getKey, entry -> DataSourcePoolPropertiesCreator.create(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new));
storageUnits = new LinkedHashMap<>();
for (Entry<String, StorageNode> entry : storageNodes.entrySet()) {
storageUnits.put(entry.getKey(), new StorageUnit(entry.getValue(), dataSourcePoolPropsMap.get(entry.getKey()), dataSources.get(entry.getValue().getName().getName())));
storageUnits.put(entry.getKey(), new StorageUnit(entry.getValue(), dataSourcePoolPropsMap.get(entry.getKey()), dataSources.get(entry.getValue().getName())));
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;

import javax.sql.DataSource;
import java.util.Map;
Expand All @@ -32,7 +31,7 @@
@Getter
public final class StorageResource {
    
    // Physical data sources, keyed by the storage node they connect to.
    private final Map<StorageNode, DataSource> dataSources;
    
    // Logical storage unit name mapped to the storage node that backs it.
    private final Map<String, StorageNode> storageUnitNodeMap;
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@

package org.apache.shardingsphere.infra.metadata.database.resource.node;

import com.google.common.base.Objects;
import lombok.Getter;
import lombok.RequiredArgsConstructor;

import java.util.Locale;

Expand All @@ -27,5 +28,24 @@
@Getter
public final class StorageNode {
    
    // Node identity: either a plain data source name, or "hostname_port_username" for instance-level nodes.
    private final String name;
    
    public StorageNode(final String hostname, final int port, final String username) {
        // Collapse host/port/user into a single identity so units on the same instance share one node.
        name = String.format("%s_%s_%s", hostname, port, username);
    }
    
    @Override
    public boolean equals(final Object obj) {
        // Case-insensitive comparison: node names are treated like SQL identifiers.
        return obj instanceof StorageNode && ((StorageNode) obj).name.equalsIgnoreCase(name);
    }
    
    @Override
    public int hashCode() {
        // Locale.ROOT keeps the hash consistent with the locale-independent equalsIgnoreCase above;
        // default-locale toUpperCase() breaks the equals/hashCode contract under e.g. the Turkish locale
        // ("i".toUpperCase() yields a dotted capital I there, hashing differently from "I").
        return Objects.hashCode(name.toUpperCase(Locale.ROOT));
    }
    
    @Override
    public String toString() {
        return name;
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -39,40 +39,40 @@
public final class StorageNodeAggregator {

/**
* Aggregate data source map to storage node name grouped.
* Aggregate data source map to storage node grouped.
*
* @param dataSourceMap storage unit name and data source map
* @return storage node name and data source map
* @return storage node and data source map
*/
public static Map<StorageNodeName, DataSource> aggregateDataSources(final Map<String, DataSource> dataSourceMap) {
public static Map<StorageNode, DataSource> aggregateDataSources(final Map<String, DataSource> dataSourceMap) {
return dataSourceMap.entrySet().stream().collect(
Collectors.toMap(entry -> new StorageNodeName(entry.getKey()), Entry::getValue, (oldValue, currentValue) -> currentValue, () -> new LinkedHashMap<>(dataSourceMap.size(), 1F)));
Collectors.toMap(entry -> new StorageNode(entry.getKey()), Entry::getValue, (oldValue, currentValue) -> currentValue, () -> new LinkedHashMap<>(dataSourceMap.size(), 1F)));
}

/**
* Aggregate data source pool properties map to storage node name grouped.
* Aggregate data source pool properties map to storage node grouped.
*
* @param storageUnitDataSourcePoolPropsMap storage unit name and data source pool properties map
* @return storage node name and data source pool properties map
* @return storage node and data source pool properties map
*/
public static Map<StorageNodeName, DataSourcePoolProperties> aggregateDataSourcePoolProperties(final Map<String, DataSourcePoolProperties> storageUnitDataSourcePoolPropsMap) {
Map<StorageNodeName, DataSourcePoolProperties> result = new LinkedHashMap<>();
public static Map<StorageNode, DataSourcePoolProperties> aggregateDataSourcePoolProperties(final Map<String, DataSourcePoolProperties> storageUnitDataSourcePoolPropsMap) {
Map<StorageNode, DataSourcePoolProperties> result = new LinkedHashMap<>();
for (Entry<String, DataSourcePoolProperties> entry : storageUnitDataSourcePoolPropsMap.entrySet()) {
Map<String, Object> standardProps = entry.getValue().getConnectionPropertySynonyms().getStandardProperties();
String url = standardProps.get("url").toString();
boolean isInstanceConnectionAvailable = new DatabaseTypeRegistry(DatabaseTypeFactory.get(url)).getDialectDatabaseMetaData().isInstanceConnectionAvailable();
StorageNodeName storageNodeName = getStorageNodeName(entry.getKey(), url, standardProps.get("username").toString(), isInstanceConnectionAvailable);
result.putIfAbsent(storageNodeName, entry.getValue());
StorageNode storageNode = getStorageNode(entry.getKey(), url, standardProps.get("username").toString(), isInstanceConnectionAvailable);
result.putIfAbsent(storageNode, entry.getValue());
}
return result;
}

private static StorageNodeName getStorageNodeName(final String dataSourceName, final String url, final String username, final boolean isInstanceConnectionAvailable) {
private static StorageNode getStorageNode(final String dataSourceName, final String url, final String username, final boolean isInstanceConnectionAvailable) {
try {
JdbcUrl jdbcUrl = new StandardJdbcUrlParser().parse(url);
return isInstanceConnectionAvailable ? new StorageNodeName(jdbcUrl.getHostname(), jdbcUrl.getPort(), username) : new StorageNodeName(dataSourceName);
return isInstanceConnectionAvailable ? new StorageNode(jdbcUrl.getHostname(), jdbcUrl.getPort(), username) : new StorageNode(dataSourceName);
} catch (final UnrecognizedDatabaseURLException ex) {
return new StorageNodeName(dataSourceName);
return new StorageNode(dataSourceName);
}
}
}

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,6 @@
import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry;
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;

import javax.sql.DataSource;
import java.util.LinkedHashMap;
Expand All @@ -48,7 +47,7 @@ public final class StorageUnitNodeMapUtils {
*/
public static Map<String, StorageNode> fromDataSources(final Map<String, DataSource> dataSources) {
return dataSources.entrySet().stream()
.collect(Collectors.toMap(Entry::getKey, entry -> new StorageNode(new StorageNodeName(entry.getKey())), (oldValue, currentValue) -> currentValue, LinkedHashMap::new));
.collect(Collectors.toMap(Entry::getKey, entry -> new StorageNode(entry.getKey()), (oldValue, currentValue) -> currentValue, LinkedHashMap::new));
}

/**
Expand All @@ -70,16 +69,15 @@ private static StorageNode fromDataSourcePoolProperties(final String storageUnit
Map<String, Object> standardProps = props.getConnectionPropertySynonyms().getStandardProperties();
String url = standardProps.get("url").toString();
boolean isInstanceConnectionAvailable = new DatabaseTypeRegistry(DatabaseTypeFactory.get(url)).getDialectDatabaseMetaData().isInstanceConnectionAvailable();
StorageNodeName storageNodeName = getStorageNodeName(storageUnitName, url, standardProps.get("username").toString(), isInstanceConnectionAvailable);
return new StorageNode(storageNodeName);
return getStorageNode(storageUnitName, url, standardProps.get("username").toString(), isInstanceConnectionAvailable);
}

private static StorageNodeName getStorageNodeName(final String dataSourceName, final String url, final String username, final boolean isInstanceConnectionAvailable) {
private static StorageNode getStorageNode(final String dataSourceName, final String url, final String username, final boolean isInstanceConnectionAvailable) {
try {
JdbcUrl jdbcUrl = new StandardJdbcUrlParser().parse(url);
return isInstanceConnectionAvailable ? new StorageNodeName(jdbcUrl.getHostname(), jdbcUrl.getPort(), username) : new StorageNodeName(dataSourceName);
return isInstanceConnectionAvailable ? new StorageNode(jdbcUrl.getHostname(), jdbcUrl.getPort(), username) : new StorageNode(dataSourceName);
} catch (final UnrecognizedDatabaseURLException ex) {
return new StorageNodeName(dataSourceName);
return new StorageNode(dataSourceName);
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@
import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration;
import org.apache.shardingsphere.infra.datasource.pool.config.PoolConfiguration;
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;
import org.apache.shardingsphere.infra.fixture.FixtureRuleConfiguration;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
import org.junit.jupiter.api.Test;

import javax.sql.DataSource;
Expand All @@ -50,7 +50,7 @@ void assertGetDataSources() {
@Test
void assertGetStorageNodes() {
DataSourceGeneratedDatabaseConfiguration databaseConfig = createDataSourceGeneratedDatabaseConfiguration();
HikariDataSource hikariDataSource = (HikariDataSource) databaseConfig.getStorageResource().getDataSources().get(new StorageNodeName("normal_db"));
HikariDataSource hikariDataSource = (HikariDataSource) databaseConfig.getStorageResource().getDataSources().get(new StorageNode("normal_db"));
assertThat(hikariDataSource.getJdbcUrl(), is("jdbc:mock://127.0.0.1/normal_db"));
assertThat(hikariDataSource.getUsername(), is("root"));
assertThat(hikariDataSource.getPassword(), is(""));
Expand Down
Loading