Rename ResourceMetaData.dataSourceMap
terrymanu committed Sep 24, 2023
1 parent c0394f5 commit 82479d6
Showing 14 changed files with 53 additions and 54 deletions.
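
The diff below renames the ResourceMetaData and StorageResource field storageNodeDataSources to dataSourceMap; since both classes are annotated with Lombok's @Getter, the accessor used at every call site changes from getStorageNodeDataSources() to getDataSourceMap(). A minimal, self-contained sketch of that shape, using stand-in types rather than the real ShardingSphere classes:

import java.util.LinkedHashMap;
import java.util.Map;

// Stand-in sketch only: DataSource, StorageNode and ResourceMetaData here are placeholders
// that mirror the renamed field and accessor shown in the diff, not the real implementations.
public final class ResourceMetaDataRenameSketch {
    
    interface DataSource {
    }
    
    static final class StorageNode {
        
        private final String name;
        
        StorageNode(final String name) {
            this.name = name;
        }
    }
    
    static final class ResourceMetaData {
        
        // Renamed field (previously storageNodeDataSources); still keyed by StorageNode.
        private final Map<StorageNode, DataSource> dataSourceMap = new LinkedHashMap<>();
        
        // Renamed accessor (previously getStorageNodeDataSources(); generated by @Getter in the real class).
        Map<StorageNode, DataSource> getDataSourceMap() {
            return dataSourceMap;
        }
    }
    
    public static void main(final String[] args) {
        ResourceMetaData resourceMetaData = new ResourceMetaData();
        // Call sites in the diff switch from getStorageNodeDataSources() to getDataSourceMap().
        DataSource dataSource = resourceMetaData.getDataSourceMap().get(new StorageNode("foo_ds"));
        System.out.println(null == dataSource ? "no data source registered" : dataSource.toString());
    }
}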
@@ -43,7 +43,7 @@
 @Getter
 public final class ResourceMetaData {
 
-    private final Map<StorageNode, DataSource> storageNodeDataSources;
+    private final Map<StorageNode, DataSource> dataSourceMap;
 
     private final StorageUnitMetaData storageUnitMetaData;
 
@@ -52,16 +52,15 @@ public ResourceMetaData(final Map<String, DataSource> dataSources) {
     }
 
     public ResourceMetaData(final String databaseName, final Map<String, DataSource> dataSources) {
-        storageNodeDataSources = StorageResourceUtils.getStorageNodeDataSources(dataSources);
-        storageUnitMetaData = new StorageUnitMetaData(databaseName, storageNodeDataSources,
-                dataSources.entrySet().stream()
-                        .collect(Collectors.toMap(Entry::getKey, entry -> DataSourcePoolPropertiesCreator.create(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)),
+        dataSourceMap = StorageResourceUtils.getStorageNodeDataSources(dataSources);
+        storageUnitMetaData = new StorageUnitMetaData(databaseName, dataSourceMap, dataSources.entrySet().stream()
+                .collect(Collectors.toMap(Entry::getKey, entry -> DataSourcePoolPropertiesCreator.create(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)),
                 StorageResourceUtils.getStorageUnitNodeMappers(dataSources));
     }
 
     public ResourceMetaData(final String databaseName, final StorageResource storageResource, final Map<String, DataSourcePoolProperties> propsMap) {
-        storageNodeDataSources = storageResource.getStorageNodeDataSources();
-        storageUnitMetaData = new StorageUnitMetaData(databaseName, storageNodeDataSources, propsMap, storageResource.getStorageUnitNodeMappers());
+        dataSourceMap = storageResource.getDataSourceMap();
+        storageUnitMetaData = new StorageUnitMetaData(databaseName, dataSourceMap, propsMap, storageResource.getStorageUnitNodeMappers());
     }
 
     /**
@@ -31,22 +31,22 @@
 @Getter
 public class StorageResource {
 
-    private final Map<StorageNode, DataSource> storageNodeDataSources;
+    private final Map<StorageNode, DataSource> dataSourceMap;
 
     private final Map<String, StorageUnitNodeMapper> storageUnitNodeMappers;
 
     private final Map<String, DataSource> wrappedDataSources;
 
-    public StorageResource(final Map<StorageNode, DataSource> storageNodeDataSources, final Map<String, StorageUnitNodeMapper> storageUnitNodeMappers) {
-        this.storageNodeDataSources = storageNodeDataSources;
+    public StorageResource(final Map<StorageNode, DataSource> dataSourceMap, final Map<String, StorageUnitNodeMapper> storageUnitNodeMappers) {
+        this.dataSourceMap = dataSourceMap;
         this.storageUnitNodeMappers = storageUnitNodeMappers;
         wrappedDataSources = createWrappedDataSources();
     }
 
     private Map<String, DataSource> createWrappedDataSources() {
         Map<String, DataSource> result = new LinkedHashMap<>(storageUnitNodeMappers.size(), 1F);
         for (Entry<String, StorageUnitNodeMapper> entry : storageUnitNodeMappers.entrySet()) {
-            DataSource dataSource = storageNodeDataSources.get(entry.getValue().getStorageNode());
+            DataSource dataSource = dataSourceMap.get(entry.getValue().getStorageNode());
             if (null != dataSource) {
                 result.put(entry.getKey(), new CatalogSwitchableDataSource(dataSource, entry.getValue().getCatalog(), entry.getValue().getUrl()));
             }
@@ -50,7 +50,7 @@ void assertGetDataSources() {
     @Test
     void assertGetStorageNodes() {
         DataSourceGeneratedDatabaseConfiguration databaseConfig = createDataSourceGeneratedDatabaseConfiguration();
-        HikariDataSource hikariDataSource = (HikariDataSource) databaseConfig.getStorageResource().getStorageNodeDataSources().get(new StorageNode("normal_db"));
+        HikariDataSource hikariDataSource = (HikariDataSource) databaseConfig.getStorageResource().getDataSourceMap().get(new StorageNode("normal_db"));
         assertThat(hikariDataSource.getJdbcUrl(), is("jdbc:mock://127.0.0.1/normal_db"));
         assertThat(hikariDataSource.getUsername(), is("root"));
         assertThat(hikariDataSource.getPassword(), is(""));
@@ -45,7 +45,7 @@ void assertGetDataSources() {
     @Test
     void assertGetStorageNodes() {
         DataSourceProvidedDatabaseConfiguration databaseConfig = createDataSourceProvidedDatabaseConfiguration();
-        MockedDataSource dataSource = (MockedDataSource) databaseConfig.getStorageResource().getStorageNodeDataSources().get(new StorageNode("foo_ds"));
+        MockedDataSource dataSource = (MockedDataSource) databaseConfig.getStorageResource().getDataSourceMap().get(new StorageNode("foo_ds"));
         assertThat(dataSource.getUrl(), is("jdbc:mock://127.0.0.1/foo_ds"));
         assertThat(dataSource.getUsername(), is("root"));
         assertThat(dataSource.getPassword(), is("root"));
@@ -133,7 +133,7 @@ void assertClose() throws Exception {
         ShardingSphereDataSource actual = createShardingSphereDataSource(dataSource);
         actual.close();
         Map<StorageNode, DataSource> dataSourceMap = getContextManager(actual).getMetaDataContexts().getMetaData()
-                .getDatabase(DefaultDatabase.LOGIC_NAME).getResourceMetaData().getStorageNodeDataSources();
+                .getDatabase(DefaultDatabase.LOGIC_NAME).getResourceMetaData().getDataSourceMap();
         assertTrue(((HikariDataSource) dataSourceMap.get(new StorageNode("ds"))).isClosed());
     }
 }
@@ -115,7 +115,7 @@ private Collection<RuleConfiguration> decorateRuleConfigs(final String databaseN
 
     private Map<String, DataSourcePoolProperties> getDataSourcePoolPropertiesMap(final DatabaseConfiguration databaseConfigs) {
         if (!databaseConfigs.getDataSources().isEmpty() && databaseConfigs.getDataSourcePoolPropertiesMap().isEmpty()) {
-            return getDataSourcePoolPropertiesMap(databaseConfigs.getStorageResource().getStorageNodeDataSources());
+            return getDataSourcePoolPropertiesMap(databaseConfigs.getStorageResource().getDataSourceMap());
         }
         return databaseConfigs.getDataSourcePoolPropertiesMap();
     }
@@ -132,7 +132,7 @@ private Map<String, DataSourcePoolProperties> getDataSourcePoolPropertiesMap(fin
     public Map<String, DataSourceConfiguration> getEffectiveDataSources(final String databaseName, final Map<String, ? extends DatabaseConfiguration> databaseConfigs) {
         Map<String, DataSourcePoolProperties> propsMap = dataSourceUnitService.load(databaseName);
         if (databaseConfigs.containsKey(databaseName) && !databaseConfigs.get(databaseName).getDataSources().isEmpty()) {
-            databaseConfigs.get(databaseName).getStorageResource().getStorageNodeDataSources().values().forEach(each -> new DataSourcePoolDestroyer(each).asyncDestroy());
+            databaseConfigs.get(databaseName).getStorageResource().getDataSourceMap().values().forEach(each -> new DataSourcePoolDestroyer(each).asyncDestroy());
         }
         return propsMap.entrySet().stream().collect(Collectors.toMap(Entry::getKey,
                 entry -> DataSourcePoolPropertiesCreator.createConfiguration(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new));
@@ -121,7 +121,7 @@ private Collection<RuleConfiguration> decorateRuleConfigs(final String databaseN
 
     private Map<String, DataSourcePoolProperties> getDataSourcePoolPropertiesMap(final DatabaseConfiguration databaseConfigs) {
         if (!databaseConfigs.getDataSources().isEmpty() && databaseConfigs.getDataSourcePoolPropertiesMap().isEmpty()) {
-            return getDataSourcePoolPropertiesMap(databaseConfigs.getStorageResource().getStorageNodeDataSources());
+            return getDataSourcePoolPropertiesMap(databaseConfigs.getStorageResource().getDataSourceMap());
         }
         return databaseConfigs.getDataSourcePoolPropertiesMap();
     }
@@ -145,7 +145,7 @@ private Map<String, DataSourcePoolProperties> getDataSourcePoolPropertiesMap(fin
     public Map<String, DataSourceConfiguration> getEffectiveDataSources(final String databaseName, final Map<String, ? extends DatabaseConfiguration> databaseConfigs) {
         Map<String, DataSourcePoolProperties> propsMap = dataSourceUnitService.load(databaseName);
         if (databaseConfigs.containsKey(databaseName) && !databaseConfigs.get(databaseName).getDataSources().isEmpty()) {
-            databaseConfigs.get(databaseName).getStorageResource().getStorageNodeDataSources().values().forEach(each -> new DataSourcePoolDestroyer(each).asyncDestroy());
+            databaseConfigs.get(databaseName).getStorageResource().getDataSourceMap().values().forEach(each -> new DataSourcePoolDestroyer(each).asyncDestroy());
         }
         return propsMap.entrySet().stream().collect(Collectors.toMap(Entry::getKey,
                 entry -> DataSourcePoolPropertiesCreator.createConfiguration(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new));
@@ -274,7 +274,7 @@ public void alterSchemaMetaData(final String databaseName, final ShardingSphereD
      * @return ShardingSphere databases
      */
     public Map<String, ShardingSphereDatabase> renewDatabase(final ShardingSphereDatabase database, final SwitchingResource resource) {
-        Map<StorageNode, DataSource> newStorageNodes = getNewStorageNodes(database.getResourceMetaData().getStorageNodeDataSources(), resource);
+        Map<StorageNode, DataSource> newStorageNodes = getNewStorageNodes(database.getResourceMetaData().getDataSourceMap(), resource);
         Map<String, StorageUnitNodeMapper> newStorageUnitNodeMappers = getNewStorageUnitNodeMappers(database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits(), resource);
         StorageResource newStorageResource = new StorageResource(newStorageNodes, newStorageUnitNodeMappers);
         Map<String, DataSourcePoolProperties> propsMap = database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().entrySet().stream()
@@ -286,7 +286,7 @@ public Map<String, ShardingSphereDatabase> renewDatabase(final ShardingSphereDat
     private Map<StorageNode, DataSource> getNewStorageNodes(final Map<StorageNode, DataSource> currentStorageNodes, final SwitchingResource resource) {
         Map<StorageNode, DataSource> result = new LinkedHashMap<>();
         for (Entry<StorageNode, DataSource> entry : currentStorageNodes.entrySet()) {
-            if (!resource.getStaleStorageResource().getStorageNodeDataSources().containsKey(entry.getKey())) {
+            if (!resource.getStaleStorageResource().getDataSourceMap().containsKey(entry.getKey())) {
                 result.put(entry.getKey(), entry.getValue());
             }
         }
@@ -365,10 +365,10 @@ private DatabaseConfiguration getDatabaseConfiguration(final ResourceMetaData re
     }
 
     private StorageResource getMergedStorageResource(final ResourceMetaData currentResourceMetaData, final SwitchingResource switchingResource) {
-        Map<StorageNode, DataSource> storageNodeDataSources = currentResourceMetaData.getStorageNodeDataSources();
+        Map<StorageNode, DataSource> storageNodeDataSources = currentResourceMetaData.getDataSourceMap();
         Map<String, StorageUnitNodeMapper> storageUnitNodeMappers = currentResourceMetaData.getStorageUnitMetaData().getUnitNodeMappers();
-        if (null != switchingResource && null != switchingResource.getNewStorageResource() && !switchingResource.getNewStorageResource().getStorageNodeDataSources().isEmpty()) {
-            storageNodeDataSources.putAll(switchingResource.getNewStorageResource().getStorageNodeDataSources());
+        if (null != switchingResource && null != switchingResource.getNewStorageResource() && !switchingResource.getNewStorageResource().getDataSourceMap().isEmpty()) {
+            storageNodeDataSources.putAll(switchingResource.getNewStorageResource().getDataSourceMap());
         }
         if (null != switchingResource && null != switchingResource.getNewStorageResource() && !switchingResource.getNewStorageResource().getStorageUnitNodeMappers().isEmpty()) {
             storageUnitNodeMappers.putAll(switchingResource.getNewStorageResource().getStorageUnitNodeMappers());
@@ -55,9 +55,9 @@ public SwitchingResource registerStorageUnit(final ResourceMetaData resourceMeta
     }
 
     private StorageResource getRegisterNewStorageResource(final ResourceMetaData resourceMetaData, final StorageResourceWithProperties toBeCreatedStorageResource) {
-        Map<StorageNode, DataSource> storageNodes = new LinkedHashMap<>(toBeCreatedStorageResource.getStorageNodeDataSources().size(), 1F);
-        for (StorageNode each : toBeCreatedStorageResource.getStorageNodeDataSources().keySet()) {
-            if (!resourceMetaData.getStorageNodeDataSources().containsKey(each)) {
+        Map<StorageNode, DataSource> storageNodes = new LinkedHashMap<>(toBeCreatedStorageResource.getDataSourceMap().size(), 1F);
+        for (StorageNode each : toBeCreatedStorageResource.getDataSourceMap().keySet()) {
+            if (!resourceMetaData.getDataSourceMap().containsKey(each)) {
                 storageNodes.put(each, DataSourcePoolCreator.create(toBeCreatedStorageResource.getDataSourcePoolPropertiesMap().get(each.getName())));
             }
         }
@@ -80,17 +80,17 @@ public SwitchingResource alterStorageUnit(final ResourceMetaData resourceMetaDat
     }
 
     private StorageResource getAlterNewStorageResource(final StorageResourceWithProperties toBeAlteredStorageResource) {
-        Map<StorageNode, DataSource> storageNodes = new LinkedHashMap<>(toBeAlteredStorageResource.getStorageNodeDataSources().size(), 1F);
-        for (StorageNode each : toBeAlteredStorageResource.getStorageNodeDataSources().keySet()) {
+        Map<StorageNode, DataSource> storageNodes = new LinkedHashMap<>(toBeAlteredStorageResource.getDataSourceMap().size(), 1F);
+        for (StorageNode each : toBeAlteredStorageResource.getDataSourceMap().keySet()) {
             storageNodes.put(each, DataSourcePoolCreator.create(toBeAlteredStorageResource.getDataSourcePoolPropertiesMap().get(each.getName())));
         }
         return new StorageResource(storageNodes, toBeAlteredStorageResource.getStorageUnitNodeMappers());
     }
 
     private StorageResource getStaleStorageResource(final ResourceMetaData resourceMetaData, final StorageResourceWithProperties toBeAlteredStorageResource) {
-        Map<StorageNode, DataSource> storageNodes = new LinkedHashMap<>(toBeAlteredStorageResource.getStorageNodeDataSources().size(), 1F);
-        for (Entry<StorageNode, DataSource> entry : resourceMetaData.getStorageNodeDataSources().entrySet()) {
-            if (toBeAlteredStorageResource.getStorageNodeDataSources().containsKey(entry.getKey())) {
+        Map<StorageNode, DataSource> storageNodes = new LinkedHashMap<>(toBeAlteredStorageResource.getDataSourceMap().size(), 1F);
+        for (Entry<StorageNode, DataSource> entry : resourceMetaData.getDataSourceMap().entrySet()) {
+            if (toBeAlteredStorageResource.getDataSourceMap().containsKey(entry.getKey())) {
                 storageNodes.put(entry.getKey(), entry.getValue());
             }
         }
@@ -117,7 +117,7 @@ private StorageResource getToBeRemovedStaleStorageResource(final ResourceMetaDat
         Map<String, StorageUnitNodeMapper> reservedStorageUnitNodeMappers = resourceMetaData.getStorageUnitMetaData().getUnitNodeMappers();
         Map<StorageNode, DataSource> storageNodes = new LinkedHashMap<>(1, 1F);
         if (reservedStorageUnitNodeMappers.values().stream().noneMatch(each -> each.getStorageNode().equals(storageUnitNodeMapper.getStorageNode()))) {
-            storageNodes.put(storageUnitNodeMapper.getStorageNode(), resourceMetaData.getStorageNodeDataSources().get(storageUnitNodeMapper.getStorageNode()));
+            storageNodes.put(storageUnitNodeMapper.getStorageNode(), resourceMetaData.getDataSourceMap().get(storageUnitNodeMapper.getStorageNode()));
         }
         return new StorageResource(storageNodes, Collections.singletonMap(storageUnitName, storageUnitNodeMapper));
     }
