diff --git a/features/encrypt/distsql/handler/src/main/java/org/apache/shardingsphere/encrypt/distsql/handler/converter/EncryptRuleStatementConverter.java b/features/encrypt/distsql/handler/src/main/java/org/apache/shardingsphere/encrypt/distsql/handler/converter/EncryptRuleStatementConverter.java
index ab105ee0406d6..a486da95094a6 100644
--- a/features/encrypt/distsql/handler/src/main/java/org/apache/shardingsphere/encrypt/distsql/handler/converter/EncryptRuleStatementConverter.java
+++ b/features/encrypt/distsql/handler/src/main/java/org/apache/shardingsphere/encrypt/distsql/handler/converter/EncryptRuleStatementConverter.java
@@ -58,35 +58,34 @@ public static EncryptRuleConfiguration convert(final
         Collection columns = new LinkedList<>();
         for (EncryptColumnSegment each : ruleSegment.getColumns()) {
-            columns.add(createEncryptColumnRuleConfiguration(ruleSegment.getTableName(), each));
+            columns.add(createEncryptColumnRuleConfiguration(ruleSegment, each));
         }
         return new EncryptTableRuleConfiguration(ruleSegment.getTableName(), columns);
     }
     
-    private static EncryptColumnRuleConfiguration createEncryptColumnRuleConfiguration(final String tableName, final EncryptColumnSegment columnSegment) {
+    private static EncryptColumnRuleConfiguration createEncryptColumnRuleConfiguration(final EncryptRuleSegment ruleSegment, final EncryptColumnSegment columnSegment) {
         EncryptColumnItemRuleConfiguration cipherColumnConfig = new EncryptColumnItemRuleConfiguration(
-                columnSegment.getCipher().getName(), getEncryptorName(tableName, columnSegment.getName()));
+                columnSegment.getCipher().getName(), getEncryptorName(ruleSegment.getTableName(), columnSegment.getName()));
         EncryptColumnRuleConfiguration result = new EncryptColumnRuleConfiguration(columnSegment.getName(), cipherColumnConfig);
         if (null != columnSegment.getAssistedQuery()) {
-            setAssistedQuery(tableName, columnSegment, result);
+            setAssistedQuery(ruleSegment.getTableName(), columnSegment, result);
         }
         if (null != columnSegment.getLikeQuery()) {
-            setLikeQuery(tableName, columnSegment, result);
+            setLikeQuery(ruleSegment.getTableName(), columnSegment, result);
         }
         return result;
     }
     
-    private static void setAssistedQuery(final String tableName, final EncryptColumnSegment columnSegment, final EncryptColumnRuleConfiguration result) {
-        String assistedQueryEncryptorName = null == columnSegment.getAssistedQuery().getEncryptor() ? null
-                : getAssistedQueryEncryptorName(tableName, columnSegment.getName());
+    private static void setAssistedQuery(final String tableName, final EncryptColumnSegment columnSegment, final EncryptColumnRuleConfiguration columnRuleConfig) {
+        String assistedQueryEncryptorName = null == columnSegment.getAssistedQuery().getEncryptor() ? null : getAssistedQueryEncryptorName(tableName, columnSegment.getName());
         EncryptColumnItemRuleConfiguration assistedQueryColumnConfig = new EncryptColumnItemRuleConfiguration(columnSegment.getAssistedQuery().getName(), assistedQueryEncryptorName);
-        result.setAssistedQuery(assistedQueryColumnConfig);
+        columnRuleConfig.setAssistedQuery(assistedQueryColumnConfig);
     }
     
-    private static void setLikeQuery(final String tableName, final EncryptColumnSegment columnSegment, final EncryptColumnRuleConfiguration result) {
+    private static void setLikeQuery(final String tableName, final EncryptColumnSegment columnSegment, final EncryptColumnRuleConfiguration columnRuleConfig) {
         String likeQueryEncryptorName = null == columnSegment.getLikeQuery().getEncryptor() ? null : getLikeQueryEncryptorName(tableName, columnSegment.getName());
         EncryptColumnItemRuleConfiguration likeQueryColumnConfig = new EncryptColumnItemRuleConfiguration(columnSegment.getLikeQuery().getName(), likeQueryEncryptorName);
-        result.setLikeQuery(likeQueryColumnConfig);
+        columnRuleConfig.setLikeQuery(likeQueryColumnConfig);
     }
     
     private static Map createEncryptorConfigurations(final EncryptRuleSegment ruleSegment) {
diff --git a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/adapter/AbstractStatementAdapter.java b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/adapter/AbstractStatementAdapter.java
index fc82caf0a1252..dabd42d7d2a83 100644
--- a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/adapter/AbstractStatementAdapter.java
+++ b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/adapter/AbstractStatementAdapter.java
@@ -58,13 +58,15 @@ public abstract class AbstractStatementAdapter extends AbstractUnsupportedOperat
     private boolean closed;
     
     protected final boolean isNeedImplicitCommitTransaction(final ShardingSphereConnection connection, final ExecutionContext executionContext) {
-        return connection.getAutoCommit() && isNotInDistributedTransaction(connection) && isModifiedSQL(executionContext) && executionContext.getExecutionUnits().size() > 1;
-    }
-    
-    private boolean isNotInDistributedTransaction(final ShardingSphereConnection connection) {
+        if (!connection.getAutoCommit()) {
+            return false;
+        }
         ConnectionTransaction connectionTransaction = connection.getDatabaseConnectionManager().getConnectionTransaction();
         boolean isInTransaction = connection.getDatabaseConnectionManager().getConnectionContext().getTransactionContext().isInTransaction();
-        return TransactionType.isDistributedTransaction(connectionTransaction.getTransactionType()) && !isInTransaction;
+        if (!TransactionType.isDistributedTransaction(connectionTransaction.getTransactionType()) || isInTransaction) {
+            return false;
+        }
+        return isModifiedSQL(executionContext) && executionContext.getExecutionUnits().size() > 1;
     }
     
     private boolean isModifiedSQL(final ExecutionContext executionContext) {
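Note on the isNeedImplicitCommitTransaction() rewrite above: it flattens the original compound condition into guard clauses (auto-commit must be on, the transaction type must be distributed, and no transaction may already be open) before checking for modified SQL across multiple execution units. A minimal illustrative sketch of the same shape, with plain booleans standing in for the ShardingSphere connection and execution types; the class and parameter names here are made up and not part of the patch:

    // Illustrative sketch only -- plain values stand in for ShardingSphereConnection and ExecutionContext.
    public final class ImplicitCommitCondition {
        
        private ImplicitCommitCondition() {
        }
        
        public static boolean isNeedImplicitCommit(final boolean autoCommit, final boolean distributedTransactionType,
                                                   final boolean inTransaction, final boolean modifiedSQL, final int executionUnitCount) {
            if (!autoCommit) {
                return false;
            }
            if (!distributedTransactionType || inTransaction) {
                return false;
            }
            return modifiedSQL && executionUnitCount > 1;
        }
    }
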
diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/AbstractPipelineJob.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/AbstractPipelineJob.java
index 5fceae71fc5f9..e307fe0f67959 100644
--- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/AbstractPipelineJob.java
+++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/AbstractPipelineJob.java
@@ -24,7 +24,6 @@
 import org.apache.shardingsphere.data.pipeline.common.job.PipelineJob;
 import org.apache.shardingsphere.data.pipeline.common.listener.PipelineElasticJobListener;
 import org.apache.shardingsphere.data.pipeline.common.metadata.node.PipelineMetaDataNode;
-import org.apache.shardingsphere.infra.util.close.QuietlyCloser;
 import org.apache.shardingsphere.data.pipeline.common.util.PipelineDistributedBarrier;
 import org.apache.shardingsphere.data.pipeline.core.exception.PipelineInternalException;
 import org.apache.shardingsphere.data.pipeline.core.job.progress.persist.PipelineJobProgressPersistService;
@@ -34,6 +33,7 @@
 import org.apache.shardingsphere.elasticjob.infra.spi.ElasticJobServiceLoader;
 import org.apache.shardingsphere.elasticjob.lite.api.bootstrap.JobBootstrap;
 import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader;
+import org.apache.shardingsphere.infra.util.close.QuietlyCloser;
 
 import java.sql.SQLException;
 import java.util.ArrayList;
@@ -90,27 +90,14 @@ protected void prepare(final PipelineJobItemContext jobItemContext) {
         try {
             doPrepare(jobItemContext);
             // CHECKSTYLE:OFF
-        } catch (final RuntimeException ex) {
-            // CHECKSTYLE:ON
-            processFailed(jobItemContext, ex);
-            throw ex;
-            // CHECKSTYLE:OFF
         } catch (final SQLException ex) {
             // CHECKSTYLE:ON
-            processFailed(jobItemContext, ex);
             throw new PipelineInternalException(ex);
         }
     }
     
     protected abstract void doPrepare(PipelineJobItemContext jobItemContext) throws SQLException;
     
-    protected void processFailed(final PipelineJobItemContext jobItemContext, final Exception ex) {
-        String jobId = jobItemContext.getJobId();
-        log.error("job prepare failed, {}-{}", jobId, jobItemContext.getShardingItem(), ex);
-        jobAPI.updateJobItemErrorMessage(jobItemContext.getJobId(), jobItemContext.getShardingItem(), ex);
-        jobAPI.stop(jobId);
-    }
-    
     @Override
     public Optional getTasksRunner(final int shardingItem) {
         return Optional.ofNullable(tasksRunnerMap.get(shardingItem));
diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/AbstractSimplePipelineJob.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/AbstractSimplePipelineJob.java
index 7480585c91609..3491f3882b168 100644
--- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/AbstractSimplePipelineJob.java
+++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/AbstractSimplePipelineJob.java
@@ -52,14 +52,33 @@ public void execute(final ShardingContext shardingContext) {
             log.info("stopping true, ignore");
             return;
         }
-        PipelineJobItemContext jobItemContext = buildPipelineJobItemContext(shardingContext);
+        try {
+            PipelineJobItemContext jobItemContext = buildPipelineJobItemContext(shardingContext);
+            execute0(jobItemContext);
+            // CHECKSTYLE:OFF
+        } catch (final RuntimeException ex) {
+            // CHECKSTYLE:ON
+            processFailed(jobId, shardingItem, ex);
+            throw ex;
+        }
+    }
+    
+    private void execute0(final PipelineJobItemContext jobItemContext) {
+        String jobId = jobItemContext.getJobId();
+        int shardingItem = jobItemContext.getShardingItem();
         PipelineTasksRunner tasksRunner = buildPipelineTasksRunner(jobItemContext);
         if (!addTasksRunner(shardingItem, tasksRunner)) {
             return;
         }
-        getJobAPI().cleanJobItemErrorMessage(jobId, jobItemContext.getShardingItem());
+        getJobAPI().cleanJobItemErrorMessage(jobId, shardingItem);
         prepare(jobItemContext);
         log.info("start tasks runner, jobId={}, shardingItem={}", jobId, shardingItem);
         tasksRunner.start();
     }
+    
+    private void processFailed(final String jobId, final int shardingItem, final Exception ex) {
+        log.error("job prepare failed, {}-{}", jobId, shardingItem, ex);
+        getJobAPI().updateJobItemErrorMessage(jobId, shardingItem, ex);
+        getJobAPI().stop(jobId);
+    }
 }
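Note: the two pipeline job changes above move failure handling out of AbstractPipelineJob.prepare() and into AbstractSimplePipelineJob.execute(), so a RuntimeException thrown while building the job item context, registering the tasks runner, or preparing is reported once, the job is stopped, and the exception is rethrown to the scheduler. A self-contained illustrative sketch of that "report, stop, rethrow" wrapper; JobErrorReporter is a hypothetical stand-in for the real pipeline job API, not something from the patch:

    // Illustrative sketch only -- JobErrorReporter stands in for the pipeline job API used above.
    interface JobErrorReporter {
        
        void updateJobItemErrorMessage(String jobId, int shardingItem, Exception ex);
        
        void stop(String jobId);
    }
    
    final class FailFastExecution {
        
        private final JobErrorReporter reporter;
        
        FailFastExecution(final JobErrorReporter reporter) {
            this.reporter = reporter;
        }
        
        void execute(final String jobId, final int shardingItem, final Runnable body) {
            try {
                body.run();
            } catch (final RuntimeException ex) {
                // Record the failure once, stop the job, then let the scheduler see the exception.
                reporter.updateJobItemErrorMessage(jobId, shardingItem, ex);
                reporter.stop(jobId);
                throw ex;
            }
        }
    }
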
diff --git a/kernel/data-pipeline/scenario/cdc/client/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/client/CDCClient.java b/kernel/data-pipeline/scenario/cdc/client/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/client/CDCClient.java
index abe9ede4905fc..ada8bf5215259 100644
--- a/kernel/data-pipeline/scenario/cdc/client/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/client/CDCClient.java
+++ b/kernel/data-pipeline/scenario/cdc/client/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/client/CDCClient.java
@@ -81,7 +81,7 @@ private void validateParameter(final CDCClientConfiguration parameter) {
     @SneakyThrows(InterruptedException.class)
     public void connect() {
         Bootstrap bootstrap = new Bootstrap();
-        group = new NioEventLoopGroup();
+        group = new NioEventLoopGroup(1);
         bootstrap.channel(NioSocketChannel.class)
                 .group(group)
                 .option(ChannelOption.ALLOCATOR, PooledByteBufAllocator.DEFAULT)
diff --git a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/job/CDCJob.java b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/job/CDCJob.java
index 5f4edfc4bda48..7e28a6b378759 100644
--- a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/job/CDCJob.java
+++ b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/job/CDCJob.java
@@ -36,7 +36,6 @@
 import org.apache.shardingsphere.data.pipeline.common.ingest.position.FinishedPosition;
 import org.apache.shardingsphere.data.pipeline.common.job.JobStatus;
 import org.apache.shardingsphere.data.pipeline.common.job.progress.InventoryIncrementalJobItemProgress;
-import org.apache.shardingsphere.infra.util.close.QuietlyCloser;
 import org.apache.shardingsphere.data.pipeline.core.importer.sink.PipelineSink;
 import org.apache.shardingsphere.data.pipeline.core.job.AbstractPipelineJob;
 import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobCenter;
@@ -44,6 +43,7 @@
 import org.apache.shardingsphere.data.pipeline.core.task.runner.PipelineTasksRunner;
 import org.apache.shardingsphere.elasticjob.api.ShardingContext;
 import org.apache.shardingsphere.elasticjob.simple.job.SimpleJob;
+import org.apache.shardingsphere.infra.util.close.QuietlyCloser;
 
 import java.util.Collection;
 import java.util.LinkedList;
@@ -114,17 +114,15 @@ private void prepare(final Collection jobItemContexts) {
         } catch (final RuntimeException ex) {
             // CHECKSTYLE:ON
             for (PipelineJobItemContext each : jobItemContexts) {
-                processFailed(each, ex);
+                processFailed(each.getJobId(), each.getShardingItem(), ex);
             }
             throw ex;
         }
     }
     
-    @Override
-    protected void processFailed(final PipelineJobItemContext jobItemContext, final Exception ex) {
-        String jobId = jobItemContext.getJobId();
-        log.error("job prepare failed, {}-{}", jobId, jobItemContext.getShardingItem(), ex);
-        jobAPI.updateJobItemErrorMessage(jobItemContext.getJobId(), jobItemContext.getShardingItem(), ex);
+    private void processFailed(final String jobId, final int shardingItem, final Exception ex) {
+        log.error("job prepare failed, {}-{}", jobId, shardingItem, ex);
+        jobAPI.updateJobItemErrorMessage(jobId, shardingItem, ex);
         PipelineJobCenter.stop(jobId);
         jobAPI.updateJobConfigurationDisabled(jobId, true);
     }
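Note on the CDCClient change above: a no-arg NioEventLoopGroup sizes itself to Netty's default (typically 2 * available processors, overridable via the io.netty.eventLoopThreads system property), while NioEventLoopGroup(1) pins the client to a single I/O thread, which is enough for one CDC connection. A standalone illustrative sketch of the single-threaded client bootstrap, assuming Netty 4.x on the classpath; the class name and the omitted connect call are placeholders, not part of the patch:

    // Illustrative sketch only -- shows the effect of new NioEventLoopGroup(1) on a client bootstrap.
    import io.netty.bootstrap.Bootstrap;
    import io.netty.buffer.PooledByteBufAllocator;
    import io.netty.channel.ChannelOption;
    import io.netty.channel.EventLoopGroup;
    import io.netty.channel.nio.NioEventLoopGroup;
    import io.netty.channel.socket.nio.NioSocketChannel;
    
    public final class SingleThreadClientExample {
        
        public static void main(final String[] args) {
            // One I/O thread instead of Netty's default thread pool.
            EventLoopGroup group = new NioEventLoopGroup(1);
            try {
                Bootstrap bootstrap = new Bootstrap()
                        .group(group)
                        .channel(NioSocketChannel.class)
                        .option(ChannelOption.ALLOCATOR, PooledByteBufAllocator.DEFAULT);
                // bootstrap.connect(host, port) would follow in a real client.
            } finally {
                group.shutdownGracefully();
            }
        }
    }
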
diff --git a/kernel/sql-federation/api/src/main/java/org/apache/shardingsphere/sqlfederation/api/config/SQLFederationRuleConfiguration.java b/kernel/sql-federation/api/src/main/java/org/apache/shardingsphere/sqlfederation/api/config/SQLFederationRuleConfiguration.java
index e1c09e0d13a56..757f982f08c9f 100644
--- a/kernel/sql-federation/api/src/main/java/org/apache/shardingsphere/sqlfederation/api/config/SQLFederationRuleConfiguration.java
+++ b/kernel/sql-federation/api/src/main/java/org/apache/shardingsphere/sqlfederation/api/config/SQLFederationRuleConfiguration.java
@@ -26,9 +26,9 @@
 /**
  * SQL federation rule configuration.
  */
+@RequiredArgsConstructor
 @Getter
 @Setter
-@RequiredArgsConstructor
 public final class SQLFederationRuleConfiguration implements GlobalRuleConfiguration {
 
     private final boolean sqlFederationEnabled;
diff --git a/kernel/sql-federation/distsql/handler/src/main/java/org/apache/shardingsphere/sqlfederation/distsql/handler/query/ShowSQLFederationRuleExecutor.java b/kernel/sql-federation/distsql/handler/src/main/java/org/apache/shardingsphere/sqlfederation/distsql/handler/query/ShowSQLFederationRuleExecutor.java
index 801d16d346817..f91ae15b29af6 100644
--- a/kernel/sql-federation/distsql/handler/src/main/java/org/apache/shardingsphere/sqlfederation/distsql/handler/query/ShowSQLFederationRuleExecutor.java
+++ b/kernel/sql-federation/distsql/handler/src/main/java/org/apache/shardingsphere/sqlfederation/distsql/handler/query/ShowSQLFederationRuleExecutor.java
@@ -36,8 +36,10 @@ public final class ShowSQLFederationRuleExecutor implements MetaDataRequiredQuer
     @Override
     public Collection getRows(final ShardingSphereMetaData metaData, final ShowSQLFederationRuleStatement sqlStatement) {
         SQLFederationRuleConfiguration ruleConfig = metaData.getGlobalRuleMetaData().getSingleRule(SQLFederationRule.class).getConfiguration();
-        return Collections.singleton(new LocalDataQueryResultRow(String.valueOf(ruleConfig.isSqlFederationEnabled()),
-                null != ruleConfig.getExecutionPlanCache() ? ruleConfig.getExecutionPlanCache().toString() : ""));
+        String sqlFederationEnabled = String.valueOf(ruleConfig.isSqlFederationEnabled());
+        String executionPlanCache = null != ruleConfig.getExecutionPlanCache() ? ruleConfig.getExecutionPlanCache().toString() : "";
+        LocalDataQueryResultRow row = new LocalDataQueryResultRow(sqlFederationEnabled, executionPlanCache);
+        return Collections.singleton(row);
     }
     
     @Override
diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutor.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutor.java
index 25604d8d3db42..13ea8dcc689c6 100644
--- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutor.java
+++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutor.java
@@ -20,7 +20,6 @@
 import org.apache.shardingsphere.distsql.handler.query.RQLExecutor;
 import org.apache.shardingsphere.distsql.parser.statement.rql.show.ShowStorageUnitsStatement;
 import org.apache.shardingsphere.infra.database.core.connector.ConnectionProperties;
-import org.apache.shardingsphere.infra.database.core.metadata.database.DialectDatabaseMetaData;
 import org.apache.shardingsphere.infra.database.core.type.DatabaseType;
 import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry;
 import org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource;
@@ -28,7 +27,6 @@
 import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
 import org.apache.shardingsphere.infra.merge.result.impl.local.LocalDataQueryResultRow;
 import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase;
-import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData;
 import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnit;
 import org.apache.shardingsphere.infra.rule.ShardingSphereRule;
 import org.apache.shardingsphere.infra.util.json.JsonUtils;
@@ -57,15 +55,15 @@ public Collection getColumnNames() {
     @Override
     public Collection getRows(final ShardingSphereDatabase database, final ShowStorageUnitsStatement sqlStatement) {
-        ResourceMetaData resourceMetaData = database.getResourceMetaData();
         Collection result = new LinkedList<>();
         for (Entry entry : getDataSourcePoolPropertiesMap(database, sqlStatement).entrySet()) {
-            String key = entry.getKey();
-            ConnectionProperties connectionProps = resourceMetaData.getStorageUnits().get(key).getConnectionProperties();
+            String storageUnitName = entry.getKey();
+            StorageUnit storageUnit = database.getResourceMetaData().getStorageUnits().get(storageUnitName);
+            ConnectionProperties connectionProps = storageUnit.getConnectionProperties();
             Map poolProps = entry.getValue().getPoolPropertySynonyms().getStandardProperties();
-            Map customProps = getCustomProps(entry.getValue().getCustomProperties().getProperties(), connectionProps.getQueryProperties());
-            result.add(new LocalDataQueryResultRow(key,
-                    resourceMetaData.getStorageUnits().get(key).getStorageType().getType(),
+            Map customProps = getCustomProperties(entry.getValue().getCustomProperties().getProperties(), connectionProps.getQueryProperties());
+            result.add(new LocalDataQueryResultRow(storageUnitName,
+                    storageUnit.getStorageType().getType(),
                     connectionProps.getHostname(),
                     connectionProps.getPort(),
                     connectionProps.getCatalog(),
@@ -80,15 +78,6 @@ public Collection getRows(final ShardingSphereDatabase
         return result;
     }
     
-    private Map getCustomProps(final Map customProps, final Properties queryProps) {
-        Map result = new LinkedHashMap<>(customProps.size() + 1, 1F);
-        result.putAll(customProps);
-        if (!queryProps.isEmpty()) {
-            result.put("queryProperties", queryProps);
-        }
-        return result;
-    }
-    
     private Map getDataSourcePoolPropertiesMap(final ShardingSphereDatabase database, final ShowStorageUnitsStatement sqlStatement) {
         Map result = new LinkedHashMap<>(database.getResourceMetaData().getStorageUnits().size(), 1F);
         Map propsMap = database.getResourceMetaData().getStorageUnits().entrySet().stream()
@@ -113,11 +102,10 @@ private Map getDataSourcePoolPropertiesMap(fin
     private DataSourcePoolProperties getDataSourcePoolProperties(final Map propsMap, final String storageUnitName, final DatabaseType databaseType, final DataSource dataSource) {
-        DataSourcePoolProperties result = getDataSourcePoolProperties(dataSource);
-        DialectDatabaseMetaData dialectDatabaseMetaData = new DatabaseTypeRegistry(databaseType).getDialectDatabaseMetaData();
-        if (dialectDatabaseMetaData.isInstanceConnectionAvailable() && propsMap.containsKey(storageUnitName)) {
-            DataSourcePoolProperties unitDataSourcePoolProps = propsMap.get(storageUnitName);
-            for (Entry entry : unitDataSourcePoolProps.getPoolPropertySynonyms().getStandardProperties().entrySet()) {
+        DataSourcePoolProperties result = DataSourcePoolPropertiesCreator.create(
+                dataSource instanceof CatalogSwitchableDataSource ? ((CatalogSwitchableDataSource) dataSource).getDataSource() : dataSource);
+        if (new DatabaseTypeRegistry(databaseType).getDialectDatabaseMetaData().isInstanceConnectionAvailable() && propsMap.containsKey(storageUnitName)) {
+            for (Entry entry : propsMap.get(storageUnitName).getPoolPropertySynonyms().getStandardProperties().entrySet()) {
                 if (null != entry.getValue()) {
                     result.getPoolPropertySynonyms().getStandardProperties().put(entry.getKey(), entry.getValue());
                 }
@@ -126,10 +114,12 @@ private DataSourcePoolProperties getDataSourcePoolProperties(final Map
+    private Map getCustomProperties(final Map customProps, final Properties queryProps) {
+        Map result = new LinkedHashMap<>(customProps);
+        if (!queryProps.isEmpty()) {
+            result.put("queryProperties", queryProps);
+        }
+        return result;
     }
     
     private String getStandardProperty(final Map standardProps, final String key) {
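Note: getCustomProperties() above replaces the pre-sized LinkedHashMap plus putAll() of the removed getCustomProps() with the LinkedHashMap copy constructor; insertion order of the custom properties is preserved either way, and queryProperties is still appended last only when present. A small standalone sketch of that behavior; the class name and the property values are made up for illustration:

    // Illustrative sketch only -- the property names and values are made up.
    import java.util.LinkedHashMap;
    import java.util.Map;
    import java.util.Properties;
    
    public final class CustomPropertiesExample {
        
        public static Map<String, Object> merge(final Map<String, Object> customProps, final Properties queryProps) {
            // Copy constructor keeps the iteration order of customProps.
            Map<String, Object> result = new LinkedHashMap<>(customProps);
            if (!queryProps.isEmpty()) {
                result.put("queryProperties", queryProps);
            }
            return result;
        }
        
        public static void main(final String[] args) {
            Map<String, Object> customProps = new LinkedHashMap<>();
            customProps.put("connectionTimeoutMilliseconds", 30000);
            Properties queryProps = new Properties();
            queryProps.setProperty("sslMode", "disable");
            // Prints {connectionTimeoutMilliseconds=30000, queryProperties={sslMode=disable}}
            System.out.println(merge(customProps, queryProps));
        }
    }
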
diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/UnregisterStorageUnitBackendHandlerTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/UnregisterStorageUnitBackendHandlerTest.java
index b8372ffb327bc..cee1acd14c6ec 100644
--- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/UnregisterStorageUnitBackendHandlerTest.java
+++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/UnregisterStorageUnitBackendHandlerTest.java
@@ -44,8 +44,6 @@
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Answers;
 import org.mockito.Mock;
-import org.mockito.junit.jupiter.MockitoSettings;
-import org.mockito.quality.Strictness;
 
 import java.sql.SQLException;
 import java.util.Collections;
@@ -60,7 +58,6 @@
 @ExtendWith(AutoMockExtension.class)
 @StaticMockSettings(ProxyContext.class)
-@MockitoSettings(strictness = Strictness.LENIENT)
 class UnregisterStorageUnitBackendHandlerTest {
 
     @Mock(answer = Answers.RETURNS_DEEP_STUBS)
@@ -112,6 +109,7 @@ void assertExecute() throws SQLException {
         when(storageUnit.getDataSource()).thenReturn(new MockedDataSource());
         when(resourceMetaData.getStorageUnits()).thenReturn(Collections.singletonMap("foo_ds", storageUnit));
         when(database.getResourceMetaData()).thenReturn(resourceMetaData);
+        when(database.getRuleMetaData().getInUsedStorageUnitNameAndRulesMap()).thenReturn(Collections.emptyMap());
         when(contextManager.getMetaDataContexts().getMetaData().getDatabase("foo_db")).thenReturn(database);
         UnregisterStorageUnitStatement unregisterStorageUnitStatement = new UnregisterStorageUnitStatement(Collections.singleton("foo_ds"), false);
         assertThat(handler.execute("foo_db", unregisterStorageUnitStatement), instanceOf(UpdateResponseHeader.class));