Remove StorageUnitMetaData.storageNodes
terrymanu committed Sep 28, 2023
1 parent 972abd3 commit e765db8
Showing 7 changed files with 72 additions and 14 deletions.
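
In practice, as the file diffs below show, every former caller of StorageUnitMetaData.getStorageNodes() now derives the unit-to-node view on demand from getMetaDataMap(). A minimal sketch of that recurring replacement pattern, assuming the same imports as the changed files (the helper name toStorageUnitNodeMap is hypothetical and shown only for illustration):

    // Hypothetical helper: rebuilds the former storageNodes view from metaDataMap.
    private static Map<String, StorageNode> toStorageUnitNodeMap(final StorageUnitMetaData metaData) {
        return metaData.getMetaDataMap().entrySet().stream()
                .collect(Collectors.toMap(Entry::getKey, entry -> entry.getValue().getStorageNode(),
                        (oldValue, currentValue) -> oldValue, LinkedHashMap::new));
    }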
@@ -0,0 +1,46 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.shardingsphere.infra.metadata.database.resource.unit;

import lombok.Getter;
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;

import javax.sql.DataSource;

/**
 * Storage unit meta data.
 */
@Getter
public final class NewStorageUnitMetaData {

    private final StorageNode storageNode;

    private final DataSourcePoolProperties dataSourcePoolProperties;

    private final DataSource dataSource;

    private final StorageUnit storageUnit;

    public NewStorageUnitMetaData(final String databaseName, final StorageNode storageNode, final DataSourcePoolProperties dataSourcePoolProperties, final DataSource dataSource) {
        this.storageNode = storageNode;
        this.dataSourcePoolProperties = dataSourcePoolProperties;
        this.dataSource = dataSource;
        storageUnit = new StorageUnit(databaseName, dataSource, dataSourcePoolProperties, storageNode);
    }
}
@@ -34,23 +34,24 @@
@Getter
public final class StorageUnitMetaData {

    // TODO zhangliang: should refactor
    private final Map<String, StorageNode> storageNodes;

    private final Map<String, DataSourcePoolProperties> dataSourcePoolPropertiesMap;

    private final Map<String, StorageUnit> storageUnits;

    // TODO zhangliang: should refactor
    private final Map<String, DataSource> dataSources;

    private final Map<String, NewStorageUnitMetaData> metaDataMap;

    public StorageUnitMetaData(final String databaseName, final Map<String, StorageNode> storageNodes, final Map<String, DataSourcePoolProperties> dataSourcePoolPropertiesMap,
                               final Map<StorageNodeName, DataSource> dataSources) {
        this.storageNodes = storageNodes;
        metaDataMap = new LinkedHashMap<>();
        for (Entry<String, StorageNode> entry : storageNodes.entrySet()) {
            metaDataMap.put(entry.getKey(), new NewStorageUnitMetaData(databaseName, entry.getValue(), dataSourcePoolPropertiesMap.get(entry.getKey()), dataSources.get(entry.getValue().getName())));
        }
        this.dataSourcePoolPropertiesMap = dataSourcePoolPropertiesMap;
        storageUnits = storageNodes.entrySet().stream().collect(
                Collectors.toMap(Entry::getKey, entry -> new StorageUnit(databaseName, dataSources.get(entry.getValue().getName()), dataSourcePoolPropertiesMap.get(entry.getKey()), entry.getValue()),
                        (oldValue, currentValue) -> currentValue, () -> new LinkedHashMap<>(this.storageNodes.size(), 1F)));
                        (oldValue, currentValue) -> currentValue, () -> new LinkedHashMap<>(storageNodes.size(), 1F)));
        this.dataSources = storageUnits.entrySet().stream().collect(
                Collectors.toMap(Entry::getKey, entry -> entry.getValue().getDataSource(), (oldValue, currentValue) -> currentValue, () -> new LinkedHashMap<>(storageUnits.size(), 1F)));
    }
@@ -365,7 +365,9 @@ private DatabaseConfiguration getDatabaseConfiguration(final ResourceMetaData re

    private StorageResource getMergedStorageResource(final ResourceMetaData currentResourceMetaData, final SwitchingResource switchingResource) {
        Map<StorageNodeName, DataSource> storageNodeDataSources = currentResourceMetaData.getDataSources();
        Map<String, StorageNode> storageUnitNodeMap = currentResourceMetaData.getStorageUnitMetaData().getStorageNodes();
        Map<String, StorageNode> storageUnitNodeMap = new LinkedHashMap<>(
                currentResourceMetaData.getStorageUnitMetaData().getMetaDataMap().entrySet().stream()
                        .collect(Collectors.toMap(Entry::getKey, entry -> entry.getValue().getStorageNode(), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)));
        if (null != switchingResource && null != switchingResource.getNewStorageResource() && !switchingResource.getNewStorageResource().getDataSources().isEmpty()) {
            storageNodeDataSources.putAll(switchingResource.getNewStorageResource().getDataSources());
        }
@@ -120,8 +120,11 @@ public SwitchingResource unregisterStorageUnit(final ResourceMetaData resourceMe
    }

    private StorageResource getToBeRemovedStaleStorageResource(final ResourceMetaData resourceMetaData, final String storageUnitName) {
        StorageNode storageNode = resourceMetaData.getStorageUnitMetaData().getStorageNodes().remove(storageUnitName);
        Map<String, StorageNode> reservedStorageUintNodeMap = resourceMetaData.getStorageUnitMetaData().getStorageNodes();
        Map<String, StorageNode> metaDataMap = new HashMap<>(
                resourceMetaData.getStorageUnitMetaData().getMetaDataMap().entrySet().stream().collect(Collectors.toMap(Entry::getKey, entry -> entry.getValue().getStorageNode())));
        StorageNode storageNode = metaDataMap.remove(storageUnitName);
        Map<String, StorageNode> reservedStorageUintNodeMap = resourceMetaData.getStorageUnitMetaData().getMetaDataMap().entrySet().stream()
                .collect(Collectors.toMap(Entry::getKey, entry -> entry.getValue().getStorageNode()));
        Map<StorageNodeName, DataSource> storageNodes = new LinkedHashMap<>(1, 1F);
        if (reservedStorageUintNodeMap.values().stream().noneMatch(each -> each.equals(storageNode))) {
            storageNodes.put(storageNode.getName(), resourceMetaData.getDataSources().get(storageNode.getName()));
@@ -112,10 +112,12 @@ private Map<StorageNodeName, DataSource> getNewStorageNodes(final ResourceMetaDa
    }

    private Map<String, StorageNode> getNewStorageUnitNodeMap(final ResourceMetaData resourceMetaData, final Map<String, StorageNode> toBeChangedStorageUnitNodeMap) {
        Map<String, StorageNode> result = new LinkedHashMap<>(resourceMetaData.getStorageUnitMetaData().getStorageNodes());
        Map<String, StorageNode> result = new LinkedHashMap<>(resourceMetaData.getStorageUnitMetaData().getMetaDataMap().entrySet().stream()
                .collect(Collectors.toMap(Entry::getKey, entry -> entry.getValue().getStorageNode(), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)));
        result.keySet().removeAll(getToBeDeletedStorageUnitNodeMap(resourceMetaData.getStorageUnitMetaData().getStorageUnits(), toBeChangedStorageUnitNodeMap.keySet()).keySet());
        result.putAll(getChangedStorageUnitNodeMap(resourceMetaData.getStorageUnitMetaData().getStorageUnits(), toBeChangedStorageUnitNodeMap));
        result.putAll(getToBeAddedStorageUnitNodeMap(resourceMetaData.getStorageUnitMetaData().getStorageNodes(), toBeChangedStorageUnitNodeMap));
        result.putAll(getToBeAddedStorageUnitNodeMap(resourceMetaData.getStorageUnitMetaData().getMetaDataMap().entrySet().stream()
                .collect(Collectors.toMap(Entry::getKey, entry -> entry.getValue().getStorageNode(), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)), toBeChangedStorageUnitNodeMap));
        return result;
    }

@@ -164,7 +166,7 @@ private StorageResource getToBeRemovedStaleDataSources(final ResourceMetaData re
    private StorageResource getStaleDataSources(final ResourceMetaData resourceMetaData, final Map<String, StorageNode> toBeChangedStorageUnitNodeMap,
                                                final Map<String, DataSourcePoolProperties> storageUnitDataSourcePoolProps) {
        Map<StorageNodeName, DataSource> storageNodes = new LinkedHashMap<>(resourceMetaData.getDataSources().size(), 1F);
        Map<String, StorageNode> storageUnitNodeMap = new LinkedHashMap<>(resourceMetaData.getStorageUnitMetaData().getStorageNodes().size(), 1F);
        Map<String, StorageNode> storageUnitNodeMap = new LinkedHashMap<>(resourceMetaData.getStorageUnitMetaData().getMetaDataMap().size(), 1F);
        storageNodes.putAll(getToBeChangedDataSources(resourceMetaData.getDataSources(), StorageUnitNodeMapUtils.getStorageNodeDataSourcePoolProperties(storageUnitDataSourcePoolProps)));
        storageUnitNodeMap.putAll(getChangedStorageUnitNodeMap(resourceMetaData.getStorageUnitMetaData().getStorageUnits(), toBeChangedStorageUnitNodeMap));
        return new StorageResource(storageNodes, storageUnitNodeMap);
@@ -31,6 +31,7 @@
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeUtils;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.NewStorageUnitMetaData;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnit;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnitMetaData;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnitNodeMapUtils;
@@ -257,11 +258,14 @@ private ResourceMetaData createOriginalResource() {
        Map<StorageNodeName, DataSource> storageNodeDataSourceMap = StorageNodeUtils.getStorageNodeDataSources(originalDataSources);
        Map<String, StorageUnit> storageUnits = new LinkedHashMap<>(2, 1F);
        Map<String, StorageNode> storageUnitNodeMap = StorageUnitNodeMapUtils.fromDataSources(originalDataSources);
        Map<String, NewStorageUnitMetaData> metaDataMap = new LinkedHashMap<>(2, 1F);
        for (Entry<String, StorageNode> entry : storageUnitNodeMap.entrySet()) {
            storageUnits.put(entry.getKey(), new StorageUnit("foo_db", storageNodeDataSourceMap.get(entry.getValue().getName()), mock(DataSourcePoolProperties.class), entry.getValue()));
            metaDataMap.put(entry.getKey(), new NewStorageUnitMetaData(
                    "foo_db", storageUnitNodeMap.get(entry.getKey()), mock(DataSourcePoolProperties.class), storageNodeDataSourceMap.get(entry.getValue().getName())));
        }
        when(result.getStorageUnitMetaData().getStorageUnits()).thenReturn(storageUnits);
        when(result.getStorageUnitMetaData().getStorageNodes()).thenReturn(storageUnitNodeMap);
        when(result.getStorageUnitMetaData().getMetaDataMap()).thenReturn(metaDataMap);
        when(result.getDataSources()).thenReturn(storageNodeDataSourceMap);
        return result;
    }
@@ -113,7 +113,7 @@ private ContextManagerBuilderParameter createContextManagerBuilderParameter() {
    private Map<String, ShardingSphereDatabase> createDatabases() {
        when(database.getName()).thenReturn("db");
        ResourceMetaData resourceMetaData = mock(ResourceMetaData.class, RETURNS_DEEP_STUBS);
        when(resourceMetaData.getStorageUnitMetaData().getStorageNodes()).thenReturn(Collections.emptyMap());
        when(resourceMetaData.getStorageUnitMetaData().getMetaDataMap()).thenReturn(Collections.emptyMap());
        when(database.getResourceMetaData()).thenReturn(resourceMetaData);
        when(database.getSchemas()).thenReturn(Collections.singletonMap("foo_schema", new ShardingSphereSchema()));
        when(database.getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "FIXTURE"));
