Refactor StorageNode
terrymanu committed Sep 27, 2023
1 parent 125e003 commit 5d2ca78
Showing 8 changed files with 78 additions and 38 deletions.
@@ -59,7 +59,7 @@ public DataSourceGeneratedDatabaseConfiguration(final Map<String, DataSourceConf
private Map<StorageNodeIdentifier, DataSource> getStorageNodeDataSourceMap(final Map<String, StorageUnitNodeMapper> mappers) {
Map<StorageNodeIdentifier, DataSource> result = new LinkedHashMap<>(mappers.size(), 1F);
for (Entry<String, StorageUnitNodeMapper> entry : mappers.entrySet()) {
result.computeIfAbsent(entry.getValue().getStorageNodeIdentifier(),
result.computeIfAbsent(entry.getValue().getStorageNode().getName(),
key -> DataSourcePoolCreator.create(entry.getKey(), dataSourcePoolPropertiesMap.get(entry.getKey()), true, result.values()));
}
return result;
@@ -19,6 +19,7 @@

import lombok.Getter;
import org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeIdentifier;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnitNodeMapper;

@@ -48,9 +49,10 @@ public StorageResource(final Map<StorageNodeIdentifier, DataSource> dataSourceMa
private Map<String, DataSource> createWrappedDataSources() {
Map<String, DataSource> result = new LinkedHashMap<>(storageUnitNodeMappers.size(), 1F);
for (Entry<String, StorageUnitNodeMapper> entry : storageUnitNodeMappers.entrySet()) {
DataSource dataSource = dataSourceMap.get(entry.getValue().getStorageNodeIdentifier());
StorageNode storageNode = entry.getValue().getStorageNode();
DataSource dataSource = dataSourceMap.get(storageNode.getName());
if (null != dataSource) {
result.put(entry.getKey(), new CatalogSwitchableDataSource(dataSource, entry.getValue().getCatalog(), entry.getValue().getUrl()));
result.put(entry.getKey(), new CatalogSwitchableDataSource(dataSource, storageNode.getCatalog(), storageNode.getUrl()));
}
}
return result;
@@ -0,0 +1,43 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.shardingsphere.infra.metadata.database.resource.node;

import lombok.Getter;

/**
* Storage node.
*/
@Getter
public final class StorageNode {

private final StorageNodeIdentifier name;

private final String url;

private final String catalog;

public StorageNode(final String name, final String url) {
this(name, url, null);
}

public StorageNode(final String name, final String url, final String catalog) {
this.name = new StorageNodeIdentifier(name);
this.url = url;
this.catalog = catalog;
}
}
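
A minimal sketch of how the class added above can be constructed and read; the data source name, URL, and catalog here are placeholders, not values from this commit:

StorageNode node = new StorageNode("foo_ds", "jdbc:mysql://127.0.0.1:3306/foo_db", "foo_db");
StorageNodeIdentifier name = node.getName(); // the plain string name is wrapped in a StorageNodeIdentifier by the constructor
String catalog = node.getCatalog();          // "foo_db" here; the two-argument constructor would leave it null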
@@ -26,6 +26,7 @@
import org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource;
import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator;
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeIdentifier;
import org.apache.shardingsphere.infra.state.datasource.DataSourceStateManager;

@@ -63,9 +64,10 @@ public StorageUnit(final String databaseName, final Map<StorageNodeIdentifier, D
connectionProperties = createConnectionProperties(enabledStorageNodeDataSources, unitNodeMapper, storageType).orElse(null);
}

private DataSource getStorageUnitDataSource(final Map<StorageNodeIdentifier, DataSource> storageNodeDataSources, final StorageUnitNodeMapper unitNodeMapper) {
DataSource dataSource = storageNodeDataSources.get(unitNodeMapper.getStorageNodeIdentifier());
return new CatalogSwitchableDataSource(dataSource, unitNodeMapper.getCatalog(), unitNodeMapper.getUrl());
private DataSource getStorageUnitDataSource(final Map<StorageNodeIdentifier, DataSource> storageNodeDataSources, final StorageUnitNodeMapper mapper) {
StorageNode storageNode = mapper.getStorageNode();
DataSource dataSource = storageNodeDataSources.get(storageNode.getName());
return new CatalogSwitchableDataSource(dataSource, storageNode.getCatalog(), storageNode.getUrl());
}

private Map<StorageNodeIdentifier, DataSource> getEnabledStorageNodeDataSources(final String databaseName, final Map<StorageNodeIdentifier, DataSource> storageNodeDataSources) {
@@ -79,19 +81,20 @@ private Map<StorageNodeIdentifier, DataSource> getEnabledStorageNodeDataSources(
}

private DatabaseType createStorageType(final Map<StorageNodeIdentifier, DataSource> enabledStorageNodeDataSources, final StorageUnitNodeMapper unitNodeMapper) {
return DatabaseTypeEngine.getStorageType(enabledStorageNodeDataSources.containsKey(unitNodeMapper.getStorageNodeIdentifier())
? Collections.singleton(enabledStorageNodeDataSources.get(unitNodeMapper.getStorageNodeIdentifier()))
return DatabaseTypeEngine.getStorageType(enabledStorageNodeDataSources.containsKey(unitNodeMapper.getStorageNode().getName())
? Collections.singleton(enabledStorageNodeDataSources.get(unitNodeMapper.getStorageNode().getName()))
: Collections.emptyList());
}

private Optional<ConnectionProperties> createConnectionProperties(final Map<StorageNodeIdentifier, DataSource> enabledStorageNodeDataSources,
final StorageUnitNodeMapper unitNodeMapper, final DatabaseType storageType) {
if (!enabledStorageNodeDataSources.containsKey(unitNodeMapper.getStorageNodeIdentifier())) {
final StorageUnitNodeMapper mapper, final DatabaseType storageType) {
StorageNode storageNode = mapper.getStorageNode();
if (!enabledStorageNodeDataSources.containsKey(storageNode.getName())) {
return Optional.empty();
}
Map<String, Object> standardProps = DataSourcePoolPropertiesCreator.create(
enabledStorageNodeDataSources.get(unitNodeMapper.getStorageNodeIdentifier())).getConnectionPropertySynonyms().getStandardProperties();
enabledStorageNodeDataSources.get(storageNode.getName())).getConnectionPropertySynonyms().getStandardProperties();
ConnectionPropertiesParser parser = DatabaseTypedSPILoader.getService(ConnectionPropertiesParser.class, storageType);
return Optional.of(parser.parse(standardProps.get("url").toString(), standardProps.get("username").toString(), unitNodeMapper.getCatalog()));
return Optional.of(parser.parse(standardProps.get("url").toString(), standardProps.get("username").toString(), storageNode.getCatalog()));
}
}
@@ -19,7 +19,7 @@

import lombok.Getter;
import lombok.RequiredArgsConstructor;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeIdentifier;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;

/**
* Storage unit and node mapper.
@@ -30,13 +30,5 @@ public final class StorageUnitNodeMapper {

private final String name;

private final StorageNodeIdentifier storageNodeIdentifier;

private final String url;

private final String catalog;

public StorageUnitNodeMapper(final String name, final StorageNodeIdentifier storageNodeIdentifier, final String url) {
this(name, storageNodeIdentifier, url, null);
}
private final StorageNode storageNode;
}
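
Since StorageUnitNodeMapper now carries a StorageNode rather than separate identifier, url, and catalog fields, those values are read through the wrapped node. A minimal sketch under that assumption, with a placeholder unit name and URL; the old accessors in the comments correspond to the removed fields above:

StorageUnitNodeMapper mapper = new StorageUnitNodeMapper("ds_0", new StorageNode("ds_0", "jdbc:mysql://127.0.0.1:3306/demo_ds"));
StorageNodeIdentifier identifier = mapper.getStorageNode().getName(); // previously mapper.getStorageNodeIdentifier()
String url = mapper.getStorageNode().getUrl();                        // previously mapper.getUrl()
String catalog = mapper.getStorageNode().getCatalog();                // previously mapper.getCatalog(); null with the two-argument StorageNode constructor

Grouping the node name, URL, and catalog into one value object keeps the mapper to a simple unit-name-to-node pairing.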
@@ -26,6 +26,7 @@
import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry;
import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator;
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeIdentifier;

import javax.sql.DataSource;
@@ -54,7 +55,7 @@ public static Map<String, StorageUnitNodeMapper> fromDataSources(final Map<Strin
private static StorageUnitNodeMapper fromDataSource(final String storageUnitName, final DataSource dataSource) {
DataSourcePoolProperties props = DataSourcePoolPropertiesCreator.create(dataSource);
String url = props.getConnectionPropertySynonyms().getStandardProperties().get("url").toString();
return new StorageUnitNodeMapper(storageUnitName, new StorageNodeIdentifier(storageUnitName), url);
return new StorageUnitNodeMapper(storageUnitName, new StorageNode(storageUnitName, url));
}

/**
@@ -76,8 +77,8 @@ private static StorageUnitNodeMapper fromDataSourcePoolProperties(final String s
Map<String, Object> standardProps = props.getConnectionPropertySynonyms().getStandardProperties();
String url = standardProps.get("url").toString();
boolean isInstanceConnectionAvailable = new DatabaseTypeRegistry(DatabaseTypeFactory.get(url)).getDialectDatabaseMetaData().isInstanceConnectionAvailable();
StorageNodeIdentifier storageNodeIdentifier = new StorageNodeIdentifier(getStorageNodeName(storageUnitName, url, standardProps.get("username").toString(), isInstanceConnectionAvailable));
return createStorageUnitNodeMapper(storageNodeIdentifier, storageUnitName, url, isInstanceConnectionAvailable);
String storageNodeName = getStorageNodeName(storageUnitName, url, standardProps.get("username").toString(), isInstanceConnectionAvailable);
return createStorageUnitNodeMapper(storageNodeName, storageUnitName, url, isInstanceConnectionAvailable);
}

private static String getStorageNodeName(final String dataSourceName, final String url, final String username, final boolean isInstanceConnectionAvailable) {
@@ -93,11 +94,10 @@ private static String generateStorageNodeName(final String hostname, final int p
return String.format("%s_%s_%s", hostname, port, username);
}

private static StorageUnitNodeMapper createStorageUnitNodeMapper(final StorageNodeIdentifier storageNodeIdentifier,
final String storageUnitName, final String url, final boolean isInstanceConnectionAvailable) {
private static StorageUnitNodeMapper createStorageUnitNodeMapper(final String storageNodeName, final String storageUnitName, final String url, final boolean isInstanceConnectionAvailable) {
return isInstanceConnectionAvailable
? new StorageUnitNodeMapper(storageUnitName, storageNodeIdentifier, url, new StandardJdbcUrlParser().parse(url).getDatabase())
: new StorageUnitNodeMapper(storageUnitName, storageNodeIdentifier, url);
? new StorageUnitNodeMapper(storageUnitName, new StorageNode(storageNodeName, url, new StandardJdbcUrlParser().parse(url).getDatabase()))
: new StorageUnitNodeMapper(storageUnitName, new StorageNode(storageNodeName, url));
}

/**
@@ -57,7 +57,7 @@ public SwitchingResource registerStorageUnit(final ResourceMetaData resourceMeta

private StorageResource getRegisterNewStorageResource(final ResourceMetaData resourceMetaData,
final Map<String, StorageUnitNodeMapper> mappers, final Map<String, DataSourcePoolProperties> storageUnitDataSourcePoolProps) {
Collection<StorageNodeIdentifier> storageNodeIdentifiers = mappers.values().stream().map(StorageUnitNodeMapper::getStorageNodeIdentifier).collect(Collectors.toSet());
Collection<StorageNodeIdentifier> storageNodeIdentifiers = mappers.values().stream().map(each -> each.getStorageNode().getName()).collect(Collectors.toSet());
Map<StorageNodeIdentifier, DataSourcePoolProperties> storageNodeDataSourcePoolProps = StorageUnitNodeMapperUtils.getStorageNodeDataSourcePoolProperties(storageUnitDataSourcePoolProps);
Map<StorageNodeIdentifier, DataSource> newStorageNodes = new LinkedHashMap<>(storageNodeIdentifiers.size(), 1F);
for (StorageNodeIdentifier each : storageNodeIdentifiers) {
@@ -84,7 +84,7 @@ public SwitchingResource alterStorageUnit(final ResourceMetaDat
}

private StorageResource getAlterNewStorageResource(final Map<String, StorageUnitNodeMapper> mappers, final Map<String, DataSourcePoolProperties> storageUnitDataSourcePoolProps) {
Collection<StorageNodeIdentifier> toBeAlteredStorageNodeIdentifiers = mappers.values().stream().map(StorageUnitNodeMapper::getStorageNodeIdentifier).collect(Collectors.toSet());
Collection<StorageNodeIdentifier> toBeAlteredStorageNodeIdentifiers = mappers.values().stream().map(each -> each.getStorageNode().getName()).collect(Collectors.toSet());
Map<StorageNodeIdentifier, DataSourcePoolProperties> storageNodeDataSourcePoolProps = StorageUnitNodeMapperUtils.getStorageNodeDataSourcePoolProperties(storageUnitDataSourcePoolProps);
Map<StorageNodeIdentifier, DataSource> storageNodes = new LinkedHashMap<>(toBeAlteredStorageNodeIdentifiers.size(), 1F);
for (StorageNodeIdentifier each : toBeAlteredStorageNodeIdentifiers) {
@@ -94,7 +94,7 @@ private StorageResource getAlterNewStorageResource(final Map<String, StorageUnit
}

private StorageResource getStaleStorageResource(final ResourceMetaData resourceMetaData, final Map<String, StorageUnitNodeMapper> mappers) {
Collection<StorageNodeIdentifier> toBeAlteredStorageNodeIdentifiers = mappers.values().stream().map(StorageUnitNodeMapper::getStorageNodeIdentifier).collect(Collectors.toSet());
Collection<StorageNodeIdentifier> toBeAlteredStorageNodeIdentifiers = mappers.values().stream().map(each -> each.getStorageNode().getName()).collect(Collectors.toSet());
Map<StorageNodeIdentifier, DataSource> storageNodes = new LinkedHashMap<>(toBeAlteredStorageNodeIdentifiers.size(), 1F);
for (Entry<StorageNodeIdentifier, DataSource> entry : resourceMetaData.getDataSourceMap().entrySet()) {
if (toBeAlteredStorageNodeIdentifiers.contains(entry.getKey())) {
@@ -123,8 +123,8 @@ private StorageResource getToBeRemovedStaleStorageResource(final ResourceMetaDat
StorageUnitNodeMapper storageUnitNodeMapper = resourceMetaData.getStorageUnitMetaData().getUnitNodeMappers().remove(storageUnitName);
Map<String, StorageUnitNodeMapper> reservedStorageUnitNodeMappers = resourceMetaData.getStorageUnitMetaData().getUnitNodeMappers();
Map<StorageNodeIdentifier, DataSource> storageNodes = new LinkedHashMap<>(1, 1F);
if (reservedStorageUnitNodeMappers.values().stream().noneMatch(each -> each.getStorageNodeIdentifier().equals(storageUnitNodeMapper.getStorageNodeIdentifier()))) {
storageNodes.put(storageUnitNodeMapper.getStorageNodeIdentifier(), resourceMetaData.getDataSourceMap().get(storageUnitNodeMapper.getStorageNodeIdentifier()));
if (reservedStorageUnitNodeMappers.values().stream().noneMatch(each -> each.getStorageNode().equals(storageUnitNodeMapper.getStorageNode()))) {
storageNodes.put(storageUnitNodeMapper.getStorageNode().getName(), resourceMetaData.getDataSourceMap().get(storageUnitNodeMapper.getStorageNode().getName()));
}
return new StorageResource(storageNodes, Collections.singletonMap(storageUnitName, storageUnitNodeMapper));
}