diff --git a/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/ral/import-database-configuration.cn.md b/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/ral/import-database-configuration.cn.md index 9cc71819a773f..cf10cca4ccb62 100644 --- a/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/ral/import-database-configuration.cn.md +++ b/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/ral/import-database-configuration.cn.md @@ -13,10 +13,7 @@ weight = 12 {{% tab name="语法" %}} ```sql ExportDatabaseConfiguration ::= - 'IMPORT' 'DATABASE' 'CONFIGURATION' 'FROM' 'FILE' filePath ('TO' databaseName)? - -databaseName ::= - identifier + 'IMPORT' 'DATABASE' 'CONFIGURATION' 'FROM' 'FILE' filePath filePath ::= string @@ -38,7 +35,7 @@ filePath ::= - 将 `YAML` 中的配置导入到指定逻辑库中 ```sql -IMPORT DATABASE CONFIGURATION FROM FILE "/xxx/config_sharding_db.yaml" TO sharding_db; +IMPORT DATABASE CONFIGURATION FROM FILE "/xxx/config_sharding_db.yaml"; ``` - 将 `YAML` 中的配置导入到当前逻辑库中 @@ -49,8 +46,8 @@ IMPORT DATABASE CONFIGURATION FROM FILE "/xxx/config_sharding_db.yaml"; ### 保留字 -`IMPORT`、`DATABASE`、`CONFIGURATION`、`FROM`、`FILE`、`TO` +`IMPORT`、`DATABASE`、`CONFIGURATION`、`FROM`、`FILE` ### 相关链接 -- [保留字](/cn/user-manual/shardingsphere-proxy/distsql/syntax/reserved-word/) \ No newline at end of file +- [保留字](/cn/user-manual/shardingsphere-proxy/distsql/syntax/reserved-word/) diff --git a/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/ral/import-database-configuration.en.md b/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/ral/import-database-configuration.en.md index fda7344b777fe..66c057f539bff 100644 --- a/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/ral/import-database-configuration.en.md +++ b/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/ral/import-database-configuration.en.md @@ -13,10 +13,7 @@ The `IMPORT DATABASE 
CONFIGURATION` syntax is used to import `YAML` configuratio {{% tab name="Grammar" %}} ```sql ExportDatabaseConfiguration ::= - 'IMPORT' 'DATABASE' 'CONFIGURATION' 'FROM' 'FILE' filePath ('TO' databaseName)? - -databaseName ::= - identifier + 'IMPORT' 'DATABASE' 'CONFIGURATION' 'FROM' 'FILE' filePath filePath ::= string @@ -38,7 +35,7 @@ filePath ::= - Import the configuration in `YAML` into the specified database ```sql -IMPORT DATABASE CONFIGURATION FROM FILE "/xxx/config_sharding_db.yaml" TO sharding_db; +IMPORT DATABASE CONFIGURATION FROM FILE "/xxx/config_sharding_db.yaml"; ``` - Import the configuration in `YAML` into the current database @@ -49,7 +46,7 @@ IMPORT DATABASE CONFIGURATION FROM FILE "/xxx/config_sharding_db.yaml"; ### Reserved word -`IMPORT`, `DATABASE`, `CONFIGURATION`, `FROM`, `FILE`, `TO` +`IMPORT`, `DATABASE`, `CONFIGURATION`, `FROM`, `FILE` ### Related links diff --git a/features/encrypt/distsql/handler/src/test/java/org/apache/shardingsphere/encrypt/distsql/handler/query/ShowEncryptRuleExecutorTest.java b/features/encrypt/distsql/handler/src/test/java/org/apache/shardingsphere/encrypt/distsql/handler/query/ShowEncryptRuleExecutorTest.java index a334774dc1d99..02d4d7ff7e317 100644 --- a/features/encrypt/distsql/handler/src/test/java/org/apache/shardingsphere/encrypt/distsql/handler/query/ShowEncryptRuleExecutorTest.java +++ b/features/encrypt/distsql/handler/src/test/java/org/apache/shardingsphere/encrypt/distsql/handler/query/ShowEncryptRuleExecutorTest.java @@ -58,7 +58,7 @@ void assertGetRowData() { assertThat(row.getCell(4), is("user_assisted")); assertThat(row.getCell(5), is("user_like")); assertThat(row.getCell(6), is("md5")); - assertThat(row.getCell(7), is("{}")); + assertThat(row.getCell(7), is("")); assertThat(row.getCell(8), is("")); assertThat(row.getCell(9), is("")); assertThat(row.getCell(10), is("")); diff --git 
a/features/mask/distsql/handler/src/test/java/org/apache/shardingsphere/mask/distsql/handler/query/ShowMaskRuleExecutorTest.java b/features/mask/distsql/handler/src/test/java/org/apache/shardingsphere/mask/distsql/handler/query/ShowMaskRuleExecutorTest.java index 096704a189fdc..a6e59fc54e54f 100644 --- a/features/mask/distsql/handler/src/test/java/org/apache/shardingsphere/mask/distsql/handler/query/ShowMaskRuleExecutorTest.java +++ b/features/mask/distsql/handler/src/test/java/org/apache/shardingsphere/mask/distsql/handler/query/ShowMaskRuleExecutorTest.java @@ -55,7 +55,7 @@ void assertGetRowData() { assertThat(row.getCell(1), is("t_mask")); assertThat(row.getCell(2), is("user_id")); assertThat(row.getCell(3), is("md5")); - assertThat(row.getCell(4), is("{}")); + assertThat(row.getCell(4), is("")); } @Test diff --git a/features/shadow/distsql/handler/src/test/java/org/apache/shardingsphere/shadow/distsql/query/ShowShadowRuleExecutorTest.java b/features/shadow/distsql/handler/src/test/java/org/apache/shardingsphere/shadow/distsql/query/ShowShadowRuleExecutorTest.java index 91270ddc484db..593cabbd9f8fe 100644 --- a/features/shadow/distsql/handler/src/test/java/org/apache/shardingsphere/shadow/distsql/query/ShowShadowRuleExecutorTest.java +++ b/features/shadow/distsql/handler/src/test/java/org/apache/shardingsphere/shadow/distsql/query/ShowShadowRuleExecutorTest.java @@ -56,14 +56,14 @@ void assertGetRowData() { assertThat(row.getCell(3), is("source")); assertThat(row.getCell(4), is("shadow")); assertThat(row.getCell(5), is("REGEX_MATCH")); - assertThat(row.getCell(6), is("{}")); + assertThat(row.getCell(6), is("")); row = iterator.next(); assertThat(row.getCell(1), is("t_order_item")); assertThat(row.getCell(2), is("shadow_rule")); assertThat(row.getCell(3), is("source")); assertThat(row.getCell(4), is("shadow")); assertThat(row.getCell(5), is("REGEX_MATCH")); - assertThat(row.getCell(6), is("{}")); + assertThat(row.getCell(6), is("")); } @Test diff --git 
a/features/sharding/core/pom.xml b/features/sharding/core/pom.xml index 00a10e4d655a7..ccfb28a8a3982 100644 --- a/features/sharding/core/pom.xml +++ b/features/sharding/core/pom.xml @@ -83,6 +83,16 @@ shardingsphere-infra-expr-core ${project.version} + + org.apache.shardingsphere + shardingsphere-infra-key-generator-uuid + ${project.version} + + + org.apache.shardingsphere + shardingsphere-infra-key-generator-snowflake + ${project.version} + org.apache.shardingsphere diff --git a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rewrite/context/ShardingSQLRewriteContextDecorator.java b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rewrite/context/ShardingSQLRewriteContextDecorator.java index 14ecbe373768c..eb0e9688b7f59 100644 --- a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rewrite/context/ShardingSQLRewriteContextDecorator.java +++ b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rewrite/context/ShardingSQLRewriteContextDecorator.java @@ -19,6 +19,7 @@ import lombok.Setter; import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; +import org.apache.shardingsphere.infra.binder.context.statement.dml.InsertStatementContext; import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.rewrite.context.SQLRewriteContext; import org.apache.shardingsphere.infra.rewrite.context.SQLRewriteContextDecorator; @@ -39,15 +40,16 @@ public final class ShardingSQLRewriteContextDecorator implements SQLRewriteConte @Override public void decorate(final ShardingRule shardingRule, final ConfigurationProperties props, final SQLRewriteContext sqlRewriteContext, final RouteContext routeContext) { - if (!containsShardingTable(shardingRule, sqlRewriteContext.getSqlStatementContext())) { + SQLStatementContext sqlStatementContext = sqlRewriteContext.getSqlStatementContext(); + if (sqlStatementContext instanceof 
InsertStatementContext && !containsShardingTable(shardingRule, sqlStatementContext)) { return; } if (!sqlRewriteContext.getParameters().isEmpty()) { Collection parameterRewriters = - new ShardingParameterRewriterBuilder(shardingRule, routeContext, sqlRewriteContext.getDatabase().getSchemas(), sqlRewriteContext.getSqlStatementContext()).getParameterRewriters(); + new ShardingParameterRewriterBuilder(shardingRule, routeContext, sqlRewriteContext.getDatabase().getSchemas(), sqlStatementContext).getParameterRewriters(); rewriteParameters(sqlRewriteContext, parameterRewriters); } - sqlRewriteContext.addSQLTokenGenerators(new ShardingTokenGenerateBuilder(shardingRule, routeContext, sqlRewriteContext.getSqlStatementContext()).getSQLTokenGenerators()); + sqlRewriteContext.addSQLTokenGenerators(new ShardingTokenGenerateBuilder(shardingRule, routeContext, sqlStatementContext).getSQLTokenGenerators()); } private boolean containsShardingTable(final ShardingRule shardingRule, final SQLStatementContext sqlStatementContext) { diff --git a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rule/ShardingRule.java b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rule/ShardingRule.java index 2b38f3e591ef8..e1f69d895675e 100644 --- a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rule/ShardingRule.java +++ b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rule/ShardingRule.java @@ -36,6 +36,8 @@ import org.apache.shardingsphere.infra.rule.identifier.type.TableNamesMapper; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; +import org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm; +import org.apache.shardingsphere.keygen.core.exception.algorithm.GenerateKeyStrategyNotFoundException; import org.apache.shardingsphere.sharding.api.config.ShardingRuleConfiguration; import 
org.apache.shardingsphere.sharding.api.config.rule.ShardingAutoTableRuleConfiguration; import org.apache.shardingsphere.sharding.api.config.rule.ShardingTableReferenceRuleConfiguration; @@ -48,11 +50,9 @@ import org.apache.shardingsphere.sharding.api.config.strategy.sharding.StandardShardingStrategyConfiguration; import org.apache.shardingsphere.sharding.api.sharding.ShardingAutoTableAlgorithm; import org.apache.shardingsphere.sharding.cache.ShardingCache; -import org.apache.shardingsphere.sharding.exception.algorithm.keygen.GenerateKeyStrategyNotFoundException; import org.apache.shardingsphere.sharding.exception.algorithm.sharding.ShardingAlgorithmInitializationException; import org.apache.shardingsphere.sharding.exception.metadata.InvalidBindingTablesException; import org.apache.shardingsphere.sharding.exception.metadata.ShardingTableRuleNotFoundException; -import org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm; import org.apache.shardingsphere.sharding.spi.ShardingAlgorithm; import org.apache.shardingsphere.sharding.spi.ShardingAuditAlgorithm; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; diff --git a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/algorithm/keygen/fixture/AutoIncrementKeyGenerateAlgorithmFixture.java b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/algorithm/keygen/fixture/AutoIncrementKeyGenerateAlgorithmFixture.java index f593618cf1717..a2533d6c055b5 100644 --- a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/algorithm/keygen/fixture/AutoIncrementKeyGenerateAlgorithmFixture.java +++ b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/algorithm/keygen/fixture/AutoIncrementKeyGenerateAlgorithmFixture.java @@ -18,7 +18,7 @@ package org.apache.shardingsphere.sharding.algorithm.keygen.fixture; import lombok.Getter; -import org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm; +import 
org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm; import java.util.concurrent.atomic.AtomicInteger; diff --git a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/rewrite/context/ShardingSQLRewriteContextDecoratorTest.java b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/rewrite/context/ShardingSQLRewriteContextDecoratorTest.java index 221f51abb7a80..7ad1b58e9f385 100644 --- a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/rewrite/context/ShardingSQLRewriteContextDecoratorTest.java +++ b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/rewrite/context/ShardingSQLRewriteContextDecoratorTest.java @@ -18,6 +18,7 @@ package org.apache.shardingsphere.sharding.rewrite.context; import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; +import org.apache.shardingsphere.infra.binder.context.statement.dml.InsertStatementContext; import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.rewrite.context.SQLRewriteContext; @@ -26,6 +27,7 @@ import org.junit.jupiter.api.Test; import java.util.Collections; +import java.util.Optional; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.RETURNS_DEEP_STUBS; @@ -43,4 +45,16 @@ void assertDecorate() { new ShardingSQLRewriteContextDecorator().decorate(mock(ShardingRule.class), mock(ConfigurationProperties.class), sqlRewriteContext, mock(RouteContext.class)); assertTrue(sqlRewriteContext.getSqlTokens().isEmpty()); } + + @Test + void assertDecorateWhenInsertStatementNotContainsShardingTable() { + SQLRewriteContext sqlRewriteContext = mock(SQLRewriteContext.class); + InsertStatementContext insertStatementContext = mock(InsertStatementContext.class, RETURNS_DEEP_STUBS); + 
when(insertStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singleton("t_order")); + when(sqlRewriteContext.getSqlStatementContext()).thenReturn(insertStatementContext); + ShardingRule shardingRule = mock(ShardingRule.class); + when(shardingRule.findTableRule("t_order")).thenReturn(Optional.empty()); + new ShardingSQLRewriteContextDecorator().decorate(shardingRule, mock(ConfigurationProperties.class), sqlRewriteContext, mock(RouteContext.class)); + assertTrue(sqlRewriteContext.getSqlTokens().isEmpty()); + } } diff --git a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/rule/ShardingRuleTest.java b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/rule/ShardingRuleTest.java index 2281e541c4029..8ec0df9b69f12 100644 --- a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/rule/ShardingRuleTest.java +++ b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/rule/ShardingRuleTest.java @@ -29,9 +29,10 @@ import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; +import org.apache.shardingsphere.keygen.core.exception.algorithm.GenerateKeyStrategyNotFoundException; +import org.apache.shardingsphere.keygen.snowflake.algorithm.SnowflakeKeyGenerateAlgorithm; +import org.apache.shardingsphere.keygen.uuid.algorithm.UUIDKeyGenerateAlgorithm; import org.apache.shardingsphere.sharding.algorithm.audit.DMLShardingConditionsShardingAuditAlgorithm; -import org.apache.shardingsphere.sharding.algorithm.keygen.SnowflakeKeyGenerateAlgorithm; -import org.apache.shardingsphere.sharding.algorithm.keygen.UUIDKeyGenerateAlgorithm; import org.apache.shardingsphere.sharding.api.config.ShardingRuleConfiguration; import 
org.apache.shardingsphere.sharding.api.config.rule.ShardingAutoTableRuleConfiguration; import org.apache.shardingsphere.sharding.api.config.rule.ShardingTableReferenceRuleConfiguration; @@ -42,7 +43,6 @@ import org.apache.shardingsphere.sharding.api.config.strategy.sharding.NoneShardingStrategyConfiguration; import org.apache.shardingsphere.sharding.api.config.strategy.sharding.ShardingStrategyConfiguration; import org.apache.shardingsphere.sharding.api.config.strategy.sharding.StandardShardingStrategyConfiguration; -import org.apache.shardingsphere.sharding.exception.algorithm.keygen.GenerateKeyStrategyNotFoundException; import org.apache.shardingsphere.sharding.exception.algorithm.sharding.ShardingAlgorithmInitializationException; import org.apache.shardingsphere.sharding.exception.metadata.InvalidBindingTablesException; import org.apache.shardingsphere.sharding.exception.metadata.ShardingTableRuleNotFoundException; diff --git a/features/sharding/core/src/test/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm b/features/sharding/core/src/test/resources/META-INF/services/org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm similarity index 100% rename from features/sharding/core/src/test/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm rename to features/sharding/core/src/test/resources/META-INF/services/org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm diff --git a/features/sharding/distsql/handler/src/main/java/org/apache/shardingsphere/sharding/distsql/handler/checker/ShardingTableRuleStatementChecker.java b/features/sharding/distsql/handler/src/main/java/org/apache/shardingsphere/sharding/distsql/handler/checker/ShardingTableRuleStatementChecker.java index 22dfe5cfca058..deed64a6b8c44 100644 --- a/features/sharding/distsql/handler/src/main/java/org/apache/shardingsphere/sharding/distsql/handler/checker/ShardingTableRuleStatementChecker.java +++ 
b/features/sharding/distsql/handler/src/main/java/org/apache/shardingsphere/sharding/distsql/handler/checker/ShardingTableRuleStatementChecker.java @@ -32,6 +32,7 @@ import org.apache.shardingsphere.infra.rule.identifier.type.DataSourceContainedRule; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; +import org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm; import org.apache.shardingsphere.sharding.api.config.ShardingRuleConfiguration; import org.apache.shardingsphere.sharding.api.config.rule.ShardingAutoTableRuleConfiguration; import org.apache.shardingsphere.sharding.api.config.rule.ShardingTableReferenceRuleConfiguration; @@ -56,7 +57,6 @@ import org.apache.shardingsphere.sharding.exception.strategy.InvalidShardingStrategyConfigurationException; import org.apache.shardingsphere.sharding.rule.BindingTableCheckedConfiguration; import org.apache.shardingsphere.sharding.rule.TableRule; -import org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm; import org.apache.shardingsphere.sharding.spi.ShardingAlgorithm; import org.apache.shardingsphere.sharding.spi.ShardingAuditAlgorithm; diff --git a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/fixture/keygen/DistSQLKeyGenerateAlgorithmFixture.java b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/fixture/keygen/DistSQLKeyGenerateAlgorithmFixture.java index 6afc3738e045e..4bbd012d604e2 100644 --- a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/fixture/keygen/DistSQLKeyGenerateAlgorithmFixture.java +++ b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/fixture/keygen/DistSQLKeyGenerateAlgorithmFixture.java @@ -17,7 +17,7 @@ package org.apache.shardingsphere.sharding.distsql.fixture.keygen; -import 
org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm; +import org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm; public final class DistSQLKeyGenerateAlgorithmFixture implements KeyGenerateAlgorithm { diff --git a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/query/ShowShardingTableRuleExecutorTest.java b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/query/ShowShardingTableRuleExecutorTest.java index 63e737d7de863..353d56c86e958 100644 --- a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/query/ShowShardingTableRuleExecutorTest.java +++ b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/query/ShowShardingTableRuleExecutorTest.java @@ -69,7 +69,7 @@ void assertGetRowData() { assertThat(row.getCell(11), is("{\"algorithm-expression\":\"t_order_${order_id % 2}\"}")); assertThat(row.getCell(12), is("order_id")); assertThat(row.getCell(13), is("SNOWFLAKE")); - assertThat(row.getCell(14), is("{}")); + assertThat(row.getCell(14), is("")); assertThat(row.getCell(15), is("DML_SHARDING_CONDITIONS")); assertThat(row.getCell(16), is("true")); } diff --git a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/query/ShowUnusedShardingKeyGeneratorExecutorTest.java b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/query/ShowUnusedShardingKeyGeneratorExecutorTest.java index 40b33b7bce985..ea1b40a2c3a3d 100644 --- a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/query/ShowUnusedShardingKeyGeneratorExecutorTest.java +++ b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/query/ShowUnusedShardingKeyGeneratorExecutorTest.java @@ -53,7 +53,7 @@ void assertGetRowData() { LocalDataQueryResultRow row = iterator.next(); 
assertThat(row.getCell(1), is("uuid_key_generator")); assertThat(row.getCell(2), is("UUID")); - assertThat(row.getCell(3), is("{}")); + assertThat(row.getCell(3), is("")); } @Test diff --git a/features/sharding/distsql/handler/src/test/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm b/features/sharding/distsql/handler/src/test/resources/META-INF/services/org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm similarity index 100% rename from features/sharding/distsql/handler/src/test/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm rename to features/sharding/distsql/handler/src/test/resources/META-INF/services/org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm diff --git a/infra/algorithm/key-generator/core/pom.xml b/infra/algorithm/key-generator/core/pom.xml new file mode 100644 index 0000000000000..5128f2bdafa5f --- /dev/null +++ b/infra/algorithm/key-generator/core/pom.xml @@ -0,0 +1,43 @@ + + + + + 4.0.0 + + org.apache.shardingsphere + shardingsphere-infra-key-generator + 5.4.2-SNAPSHOT + + shardingsphere-infra-key-generator-core + ${project.artifactId} + + + + org.apache.shardingsphere + shardingsphere-infra-common + ${project.version} + + + + org.apache.shardingsphere + shardingsphere-test-util + ${project.version} + test + + + diff --git a/features/sharding/api/src/main/java/org/apache/shardingsphere/sharding/spi/KeyGenerateAlgorithm.java b/infra/algorithm/key-generator/core/src/main/java/org/apache/shardingsphere/keygen/core/algorithm/KeyGenerateAlgorithm.java similarity index 95% rename from features/sharding/api/src/main/java/org/apache/shardingsphere/sharding/spi/KeyGenerateAlgorithm.java rename to infra/algorithm/key-generator/core/src/main/java/org/apache/shardingsphere/keygen/core/algorithm/KeyGenerateAlgorithm.java index 8b147fa5c35f2..638636a960bfd 100644 --- 
a/features/sharding/api/src/main/java/org/apache/shardingsphere/sharding/spi/KeyGenerateAlgorithm.java +++ b/infra/algorithm/key-generator/core/src/main/java/org/apache/shardingsphere/keygen/core/algorithm/KeyGenerateAlgorithm.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.shardingsphere.sharding.spi; +package org.apache.shardingsphere.keygen.core.algorithm; import org.apache.shardingsphere.infra.algorithm.ShardingSphereAlgorithm; diff --git a/infra/algorithm/key-generator/core/src/main/java/org/apache/shardingsphere/keygen/core/exception/KeyGenerateSQLException.java b/infra/algorithm/key-generator/core/src/main/java/org/apache/shardingsphere/keygen/core/exception/KeyGenerateSQLException.java new file mode 100644 index 0000000000000..ab7a70f033b5b --- /dev/null +++ b/infra/algorithm/key-generator/core/src/main/java/org/apache/shardingsphere/keygen/core/exception/KeyGenerateSQLException.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.keygen.core.exception; + +import org.apache.shardingsphere.infra.exception.core.external.sql.sqlstate.SQLState; +import org.apache.shardingsphere.infra.exception.core.external.sql.type.feature.FeatureSQLException; + +/** + * Key generate SQL exception. + */ +public abstract class KeyGenerateSQLException extends FeatureSQLException { + + private static final int FEATURE_CODE = 11; + + private static final long serialVersionUID = 3124409584064186239L; + + protected KeyGenerateSQLException(final SQLState sqlState, final int errorCode, final String reason, final Object... messageArgs) { + super(sqlState, FEATURE_CODE, errorCode, reason, messageArgs); + } +} diff --git a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/exception/algorithm/keygen/GenerateKeyStrategyNotFoundException.java b/infra/algorithm/key-generator/core/src/main/java/org/apache/shardingsphere/keygen/core/exception/algorithm/GenerateKeyStrategyNotFoundException.java similarity index 88% rename from features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/exception/algorithm/keygen/GenerateKeyStrategyNotFoundException.java rename to infra/algorithm/key-generator/core/src/main/java/org/apache/shardingsphere/keygen/core/exception/algorithm/GenerateKeyStrategyNotFoundException.java index d0c30a9ac9c68..2fb7b0634ff94 100644 --- a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/exception/algorithm/keygen/GenerateKeyStrategyNotFoundException.java +++ b/infra/algorithm/key-generator/core/src/main/java/org/apache/shardingsphere/keygen/core/exception/algorithm/GenerateKeyStrategyNotFoundException.java @@ -15,15 +15,15 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.sharding.exception.algorithm.keygen; +package org.apache.shardingsphere.keygen.core.exception.algorithm; import org.apache.shardingsphere.infra.exception.core.external.sql.sqlstate.XOpenSQLState; -import org.apache.shardingsphere.sharding.exception.ShardingSQLException; +import org.apache.shardingsphere.keygen.core.exception.KeyGenerateSQLException; /** * Generate key strategy not found exception. */ -public final class GenerateKeyStrategyNotFoundException extends ShardingSQLException { +public final class GenerateKeyStrategyNotFoundException extends KeyGenerateSQLException { private static final long serialVersionUID = 7456922260524630374L; diff --git a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/exception/algorithm/keygen/KeyGenerateAlgorithmInitializationException.java b/infra/algorithm/key-generator/core/src/main/java/org/apache/shardingsphere/keygen/core/exception/algorithm/KeyGenerateAlgorithmInitializationException.java similarity index 83% rename from features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/exception/algorithm/keygen/KeyGenerateAlgorithmInitializationException.java rename to infra/algorithm/key-generator/core/src/main/java/org/apache/shardingsphere/keygen/core/exception/algorithm/KeyGenerateAlgorithmInitializationException.java index b585a8e1a41d8..df32efacacdab 100644 --- a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/exception/algorithm/keygen/KeyGenerateAlgorithmInitializationException.java +++ b/infra/algorithm/key-generator/core/src/main/java/org/apache/shardingsphere/keygen/core/exception/algorithm/KeyGenerateAlgorithmInitializationException.java @@ -15,17 +15,17 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.sharding.exception.algorithm.keygen; +package org.apache.shardingsphere.keygen.core.exception.algorithm; import org.apache.shardingsphere.infra.exception.core.external.sql.sqlstate.XOpenSQLState; -import org.apache.shardingsphere.sharding.exception.ShardingSQLException; +import org.apache.shardingsphere.keygen.core.exception.KeyGenerateSQLException; /** * Key generate algorithm initialization exception. */ -public final class KeyGenerateAlgorithmInitializationException extends ShardingSQLException { +public final class KeyGenerateAlgorithmInitializationException extends KeyGenerateSQLException { - private static final long serialVersionUID = -9046956561006694072L; + private static final long serialVersionUID = 4137100879778822323L; public KeyGenerateAlgorithmInitializationException(final String keyGenerateType, final String reason) { super(XOpenSQLState.GENERAL_ERROR, 91, "Key generate algorithm `%s` initialization failed, reason is: %s.", keyGenerateType, reason); diff --git a/infra/algorithm/key-generator/pom.xml b/infra/algorithm/key-generator/pom.xml new file mode 100644 index 0000000000000..d737f6fdffb8a --- /dev/null +++ b/infra/algorithm/key-generator/pom.xml @@ -0,0 +1,34 @@ + + + + + 4.0.0 + + org.apache.shardingsphere + shardingsphere-infra-algorithm + 5.4.2-SNAPSHOT + + shardingsphere-infra-key-generator + pom + ${project.artifactId} + + + core + type + + diff --git a/infra/algorithm/key-generator/type/pom.xml b/infra/algorithm/key-generator/type/pom.xml new file mode 100644 index 0000000000000..47c37fc365b35 --- /dev/null +++ b/infra/algorithm/key-generator/type/pom.xml @@ -0,0 +1,34 @@ + + + + + 4.0.0 + + org.apache.shardingsphere + shardingsphere-infra-key-generator + 5.4.2-SNAPSHOT + + shardingsphere-infra-key-generator-type + pom + ${project.artifactId} + + + snowflake + uuid + + diff --git a/infra/algorithm/key-generator/type/snowflake/pom.xml b/infra/algorithm/key-generator/type/snowflake/pom.xml new file mode 
100644 index 0000000000000..74a8b23ead459 --- /dev/null +++ b/infra/algorithm/key-generator/type/snowflake/pom.xml @@ -0,0 +1,52 @@ + + + + + 4.0.0 + + org.apache.shardingsphere + shardingsphere-infra-key-generator-type + 5.4.2-SNAPSHOT + + shardingsphere-infra-key-generator-snowflake + ${project.artifactId} + + + + org.apache.shardingsphere + shardingsphere-infra-common + ${project.version} + + + org.apache.shardingsphere + shardingsphere-infra-key-generator-core + ${project.version} + + + + org.apache.shardingsphere + shardingsphere-test-util + ${project.version} + test + + + org.awaitility + awaitility + + + diff --git a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/algorithm/keygen/SnowflakeKeyGenerateAlgorithm.java b/infra/algorithm/key-generator/type/snowflake/src/main/java/org/apache/shardingsphere/keygen/snowflake/algorithm/SnowflakeKeyGenerateAlgorithm.java similarity index 95% rename from features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/algorithm/keygen/SnowflakeKeyGenerateAlgorithm.java rename to infra/algorithm/key-generator/type/snowflake/src/main/java/org/apache/shardingsphere/keygen/snowflake/algorithm/SnowflakeKeyGenerateAlgorithm.java index 0d5e4a2b2d5e5..95fe29dcca276 100644 --- a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/algorithm/keygen/SnowflakeKeyGenerateAlgorithm.java +++ b/infra/algorithm/key-generator/type/snowflake/src/main/java/org/apache/shardingsphere/keygen/snowflake/algorithm/SnowflakeKeyGenerateAlgorithm.java @@ -15,16 +15,16 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.sharding.algorithm.keygen; +package org.apache.shardingsphere.keygen.snowflake.algorithm; import lombok.Setter; import lombok.SneakyThrows; +import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.instance.InstanceContext; import org.apache.shardingsphere.infra.instance.InstanceContextAware; -import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; -import org.apache.shardingsphere.sharding.exception.algorithm.keygen.KeyGenerateAlgorithmInitializationException; -import org.apache.shardingsphere.sharding.exception.algorithm.keygen.SnowflakeClockMoveBackException; -import org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm; +import org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm; +import org.apache.shardingsphere.keygen.core.exception.algorithm.KeyGenerateAlgorithmInitializationException; +import org.apache.shardingsphere.keygen.snowflake.exception.SnowflakeClockMoveBackException; import java.time.Instant; import java.time.LocalDateTime; diff --git a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/algorithm/keygen/TimeService.java b/infra/algorithm/key-generator/type/snowflake/src/main/java/org/apache/shardingsphere/keygen/snowflake/algorithm/TimeService.java similarity index 94% rename from features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/algorithm/keygen/TimeService.java rename to infra/algorithm/key-generator/type/snowflake/src/main/java/org/apache/shardingsphere/keygen/snowflake/algorithm/TimeService.java index a2dcbf490197b..5468a8ac456a1 100644 --- a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/algorithm/keygen/TimeService.java +++ b/infra/algorithm/key-generator/type/snowflake/src/main/java/org/apache/shardingsphere/keygen/snowflake/algorithm/TimeService.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.sharding.algorithm.keygen; +package org.apache.shardingsphere.keygen.snowflake.algorithm; /** * Time service. diff --git a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/exception/algorithm/keygen/SnowflakeClockMoveBackException.java b/infra/algorithm/key-generator/type/snowflake/src/main/java/org/apache/shardingsphere/keygen/snowflake/exception/SnowflakeClockMoveBackException.java similarity index 80% rename from features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/exception/algorithm/keygen/SnowflakeClockMoveBackException.java rename to infra/algorithm/key-generator/type/snowflake/src/main/java/org/apache/shardingsphere/keygen/snowflake/exception/SnowflakeClockMoveBackException.java index ca5adbaa68da3..823a0e359d2f2 100644 --- a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/exception/algorithm/keygen/SnowflakeClockMoveBackException.java +++ b/infra/algorithm/key-generator/type/snowflake/src/main/java/org/apache/shardingsphere/keygen/snowflake/exception/SnowflakeClockMoveBackException.java @@ -15,17 +15,17 @@ * limitations under the License. */ -package org.apache.shardingsphere.sharding.exception.algorithm.keygen; +package org.apache.shardingsphere.keygen.snowflake.exception; import org.apache.shardingsphere.infra.exception.core.external.sql.sqlstate.XOpenSQLState; -import org.apache.shardingsphere.sharding.exception.ShardingSQLException; +import org.apache.shardingsphere.keygen.core.exception.KeyGenerateSQLException; /** * Snowflake clock move back exception. 
*/ -public final class SnowflakeClockMoveBackException extends ShardingSQLException { +public final class SnowflakeClockMoveBackException extends KeyGenerateSQLException { - private static final long serialVersionUID = -2435731376659956566L; + private static final long serialVersionUID = 3076059285632288623L; public SnowflakeClockMoveBackException(final long lastMillis, final long currentMillis) { super(XOpenSQLState.GENERAL_ERROR, 92, "Clock is moving backwards, last time is %d milliseconds, current time is %d milliseconds.", lastMillis, currentMillis); diff --git a/features/sharding/core/src/main/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm b/infra/algorithm/key-generator/type/snowflake/src/main/resources/META-INF/services/org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm similarity index 83% rename from features/sharding/core/src/main/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm rename to infra/algorithm/key-generator/type/snowflake/src/main/resources/META-INF/services/org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm index 3ce4e58b0d48a..e61918401f509 100644 --- a/features/sharding/core/src/main/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm +++ b/infra/algorithm/key-generator/type/snowflake/src/main/resources/META-INF/services/org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm @@ -15,5 +15,4 @@ # limitations under the License. 
# -org.apache.shardingsphere.sharding.algorithm.keygen.SnowflakeKeyGenerateAlgorithm -org.apache.shardingsphere.sharding.algorithm.keygen.UUIDKeyGenerateAlgorithm +org.apache.shardingsphere.keygen.snowflake.algorithm.SnowflakeKeyGenerateAlgorithm diff --git a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/algorithm/keygen/SnowflakeKeyGenerateAlgorithmTest.java b/infra/algorithm/key-generator/type/snowflake/src/test/java/org/apache/shardingsphere/keygen/snowflake/algorithm/SnowflakeKeyGenerateAlgorithmTest.java similarity index 96% rename from features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/algorithm/keygen/SnowflakeKeyGenerateAlgorithmTest.java rename to infra/algorithm/key-generator/type/snowflake/src/test/java/org/apache/shardingsphere/keygen/snowflake/algorithm/SnowflakeKeyGenerateAlgorithmTest.java index 95bae0f3de111..d5ff80bed737f 100644 --- a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/algorithm/keygen/SnowflakeKeyGenerateAlgorithmTest.java +++ b/infra/algorithm/key-generator/type/snowflake/src/test/java/org/apache/shardingsphere/keygen/snowflake/algorithm/SnowflakeKeyGenerateAlgorithmTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.sharding.algorithm.keygen; +package org.apache.shardingsphere.keygen.snowflake.algorithm; import lombok.SneakyThrows; import org.apache.shardingsphere.infra.config.mode.ModeConfiguration; @@ -25,13 +25,13 @@ import org.apache.shardingsphere.infra.instance.metadata.InstanceMetaData; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.lock.LockContext; -import org.apache.shardingsphere.infra.util.eventbus.EventBusContext; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; -import org.apache.shardingsphere.sharding.algorithm.keygen.fixture.FixedTimeService; -import org.apache.shardingsphere.sharding.algorithm.keygen.fixture.WorkerIdGeneratorFixture; -import org.apache.shardingsphere.sharding.exception.algorithm.keygen.KeyGenerateAlgorithmInitializationException; -import org.apache.shardingsphere.sharding.exception.algorithm.keygen.SnowflakeClockMoveBackException; -import org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm; +import org.apache.shardingsphere.infra.util.eventbus.EventBusContext; +import org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm; +import org.apache.shardingsphere.keygen.core.exception.algorithm.KeyGenerateAlgorithmInitializationException; +import org.apache.shardingsphere.keygen.snowflake.exception.SnowflakeClockMoveBackException; +import org.apache.shardingsphere.keygen.snowflake.fixture.FixedTimeService; +import org.apache.shardingsphere.keygen.snowflake.fixture.WorkerIdGeneratorFixture; import org.apache.shardingsphere.test.util.PropertiesBuilder; import org.apache.shardingsphere.test.util.PropertiesBuilder.Property; import org.awaitility.Awaitility; diff --git a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/algorithm/keygen/fixture/FixedTimeService.java 
b/infra/algorithm/key-generator/type/snowflake/src/test/java/org/apache/shardingsphere/keygen/snowflake/fixture/FixedTimeService.java similarity index 85% rename from features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/algorithm/keygen/fixture/FixedTimeService.java rename to infra/algorithm/key-generator/type/snowflake/src/test/java/org/apache/shardingsphere/keygen/snowflake/fixture/FixedTimeService.java index 0546c9c688038..990e46436298c 100644 --- a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/algorithm/keygen/fixture/FixedTimeService.java +++ b/infra/algorithm/key-generator/type/snowflake/src/test/java/org/apache/shardingsphere/keygen/snowflake/fixture/FixedTimeService.java @@ -15,11 +15,11 @@ * limitations under the License. */ -package org.apache.shardingsphere.sharding.algorithm.keygen.fixture; +package org.apache.shardingsphere.keygen.snowflake.fixture; import lombok.RequiredArgsConstructor; -import org.apache.shardingsphere.sharding.algorithm.keygen.SnowflakeKeyGenerateAlgorithm; -import org.apache.shardingsphere.sharding.algorithm.keygen.TimeService; +import org.apache.shardingsphere.keygen.snowflake.algorithm.SnowflakeKeyGenerateAlgorithm; +import org.apache.shardingsphere.keygen.snowflake.algorithm.TimeService; import java.util.concurrent.atomic.AtomicInteger; diff --git a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/algorithm/keygen/fixture/WorkerIdGeneratorFixture.java b/infra/algorithm/key-generator/type/snowflake/src/test/java/org/apache/shardingsphere/keygen/snowflake/fixture/WorkerIdGeneratorFixture.java similarity index 95% rename from features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/algorithm/keygen/fixture/WorkerIdGeneratorFixture.java rename to infra/algorithm/key-generator/type/snowflake/src/test/java/org/apache/shardingsphere/keygen/snowflake/fixture/WorkerIdGeneratorFixture.java index 10218a80bb40c..88628abb557b6 100644 --- 
a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/algorithm/keygen/fixture/WorkerIdGeneratorFixture.java +++ b/infra/algorithm/key-generator/type/snowflake/src/test/java/org/apache/shardingsphere/keygen/snowflake/fixture/WorkerIdGeneratorFixture.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.shardingsphere.sharding.algorithm.keygen.fixture; +package org.apache.shardingsphere.keygen.snowflake.fixture; import com.google.common.base.Preconditions; import lombok.RequiredArgsConstructor; diff --git a/infra/algorithm/key-generator/type/uuid/pom.xml b/infra/algorithm/key-generator/type/uuid/pom.xml new file mode 100644 index 0000000000000..cdeef599b3351 --- /dev/null +++ b/infra/algorithm/key-generator/type/uuid/pom.xml @@ -0,0 +1,52 @@ + + + + + 4.0.0 + + org.apache.shardingsphere + shardingsphere-infra-key-generator-type + 5.4.2-SNAPSHOT + + shardingsphere-infra-key-generator-uuid + ${project.artifactId} + + + + org.apache.shardingsphere + shardingsphere-infra-common + ${project.version} + + + org.apache.shardingsphere + shardingsphere-infra-key-generator-core + ${project.version} + + + + org.apache.shardingsphere + shardingsphere-test-util + ${project.version} + test + + + org.awaitility + awaitility + + + diff --git a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/algorithm/keygen/UUIDKeyGenerateAlgorithm.java b/infra/algorithm/key-generator/type/uuid/src/main/java/org/apache/shardingsphere/keygen/uuid/algorithm/UUIDKeyGenerateAlgorithm.java similarity index 90% rename from features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/algorithm/keygen/UUIDKeyGenerateAlgorithm.java rename to infra/algorithm/key-generator/type/uuid/src/main/java/org/apache/shardingsphere/keygen/uuid/algorithm/UUIDKeyGenerateAlgorithm.java index b9b96b4ae5018..01cfa43b9c4d1 100644 --- 
a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/algorithm/keygen/UUIDKeyGenerateAlgorithm.java +++ b/infra/algorithm/key-generator/type/uuid/src/main/java/org/apache/shardingsphere/keygen/uuid/algorithm/UUIDKeyGenerateAlgorithm.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.shardingsphere.sharding.algorithm.keygen; +package org.apache.shardingsphere.keygen.uuid.algorithm; -import org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm; +import org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm; import java.util.UUID; import java.util.concurrent.ThreadLocalRandom; diff --git a/infra/algorithm/key-generator/type/uuid/src/main/resources/META-INF/services/org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm b/infra/algorithm/key-generator/type/uuid/src/main/resources/META-INF/services/org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm new file mode 100644 index 0000000000000..ce91df42f290f --- /dev/null +++ b/infra/algorithm/key-generator/type/uuid/src/main/resources/META-INF/services/org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm @@ -0,0 +1,18 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +org.apache.shardingsphere.keygen.uuid.algorithm.UUIDKeyGenerateAlgorithm diff --git a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/algorithm/keygen/UUIDKeyGenerateAlgorithmTest.java b/infra/algorithm/key-generator/type/uuid/src/test/java/org/apache/shardingsphere/keygen/uuid/algorithm/UUIDKeyGenerateAlgorithmTest.java similarity index 95% rename from features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/algorithm/keygen/UUIDKeyGenerateAlgorithmTest.java rename to infra/algorithm/key-generator/type/uuid/src/test/java/org/apache/shardingsphere/keygen/uuid/algorithm/UUIDKeyGenerateAlgorithmTest.java index 9cde0bc22e2b4..b8c65e0eff7e0 100644 --- a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/algorithm/keygen/UUIDKeyGenerateAlgorithmTest.java +++ b/infra/algorithm/key-generator/type/uuid/src/test/java/org/apache/shardingsphere/keygen/uuid/algorithm/UUIDKeyGenerateAlgorithmTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.sharding.algorithm.keygen; +package org.apache.shardingsphere.keygen.uuid.algorithm; import org.junit.jupiter.api.Test; diff --git a/infra/algorithm/pom.xml b/infra/algorithm/pom.xml new file mode 100644 index 0000000000000..50db5cd9cd974 --- /dev/null +++ b/infra/algorithm/pom.xml @@ -0,0 +1,33 @@ + + + + + 4.0.0 + + org.apache.shardingsphere + shardingsphere-infra + 5.4.2-SNAPSHOT + + shardingsphere-infra-algorithm + pom + ${project.artifactId} + + + key-generator + + diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/props/PropertiesConverter.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/props/PropertiesConverter.java index b795f47860316..bc3fc0a88f532 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/props/PropertiesConverter.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/props/PropertiesConverter.java @@ -21,6 +21,8 @@ import lombok.NoArgsConstructor; import org.apache.shardingsphere.infra.util.json.JsonUtils; +import java.util.LinkedHashMap; +import java.util.Map; import java.util.Properties; /** @@ -36,6 +38,8 @@ public final class PropertiesConverter { * @return converted string content */ public static String convert(final Properties props) { - return JsonUtils.toJsonString(props); + Map sortedProps = new LinkedHashMap<>(); + props.keySet().stream().map(Object::toString).sorted().forEach(each -> sortedProps.put(each, props.get(each))); + return sortedProps.isEmpty() ? 
"" : JsonUtils.toJsonString(sortedProps); } } diff --git a/infra/common/src/test/java/org/apache/shardingsphere/infra/props/PropertiesConverterTest.java b/infra/common/src/test/java/org/apache/shardingsphere/infra/props/PropertiesConverterTest.java index 657899a3290bc..3b52f18f0d170 100644 --- a/infra/common/src/test/java/org/apache/shardingsphere/infra/props/PropertiesConverterTest.java +++ b/infra/common/src/test/java/org/apache/shardingsphere/infra/props/PropertiesConverterTest.java @@ -35,6 +35,6 @@ void assertConvert() { @Test void assertConvertEmptyProperties() { - assertThat(PropertiesConverter.convert(new Properties()), is("{}")); + assertThat(PropertiesConverter.convert(new Properties()), is("")); } } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/pojo/DataConsistencyCheckAlgorithmInfo.java b/infra/database/core/src/main/java/org/apache/shardingsphere/infra/database/core/spi/DatabaseSupportedTypedSPI.java similarity index 67% rename from kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/pojo/DataConsistencyCheckAlgorithmInfo.java rename to infra/database/core/src/main/java/org/apache/shardingsphere/infra/database/core/spi/DatabaseSupportedTypedSPI.java index 4517de99e726c..b314219326f9c 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/pojo/DataConsistencyCheckAlgorithmInfo.java +++ b/infra/database/core/src/main/java/org/apache/shardingsphere/infra/database/core/spi/DatabaseSupportedTypedSPI.java @@ -15,26 +15,22 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.data.pipeline.common.pojo; +package org.apache.shardingsphere.infra.database.core.spi; -import lombok.Getter; -import lombok.RequiredArgsConstructor; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPI; import java.util.Collection; /** - * Data consistency check algorithm info. + * Database supported SPI. */ -@RequiredArgsConstructor -@Getter -public final class DataConsistencyCheckAlgorithmInfo { +public interface DatabaseSupportedTypedSPI extends TypedSPI { - private final String type; - - private final String typeAliases; - - private final Collection supportedDatabaseTypes; - - private final String description; + /** + * Get supported database types. + * + * @return supported database types + */ + Collection getSupportedDatabaseTypes(); } diff --git a/infra/database/core/src/test/java/org/apache/shardingsphere/infra/database/core/connector/url/JdbcUrlAppenderTest.java b/infra/database/core/src/test/java/org/apache/shardingsphere/infra/database/core/connector/url/JdbcUrlAppenderTest.java index 2757038cba746..6c5acc868a5f6 100644 --- a/infra/database/core/src/test/java/org/apache/shardingsphere/infra/database/core/connector/url/JdbcUrlAppenderTest.java +++ b/infra/database/core/src/test/java/org/apache/shardingsphere/infra/database/core/connector/url/JdbcUrlAppenderTest.java @@ -39,10 +39,9 @@ void assertAppendQueryPropertiesWithoutOriginalQueryProperties() { @Test void assertAppendQueryPropertiesWithOriginalQueryProperties() { String actual = new JdbcUrlAppender().appendQueryProperties( - "jdbc:trunk://192.168.0.1:3306/foo_ds?serverTimezone=UTC&useSSL=false&rewriteBatchedStatements=true", + "jdbc:trunk://192.168.0.1:3306/foo_ds?useSSL=false&rewriteBatchedStatements=true", PropertiesBuilder.build(new Property("useSSL", Boolean.FALSE.toString()), new Property("rewriteBatchedStatements", Boolean.TRUE.toString()))); assertThat(actual, 
startsWith("jdbc:trunk://192.168.0.1:3306/foo_ds?")); - assertThat(actual, containsString("serverTimezone=UTC")); assertThat(actual, containsString("rewriteBatchedStatements=true")); assertThat(actual, containsString("useSSL=false")); } diff --git a/infra/database/core/src/test/java/org/apache/shardingsphere/infra/database/core/connector/url/StandardJdbcUrlParserTest.java b/infra/database/core/src/test/java/org/apache/shardingsphere/infra/database/core/connector/url/StandardJdbcUrlParserTest.java index b4bd40928302a..3f907a7663e90 100644 --- a/infra/database/core/src/test/java/org/apache/shardingsphere/infra/database/core/connector/url/StandardJdbcUrlParserTest.java +++ b/infra/database/core/src/test/java/org/apache/shardingsphere/infra/database/core/connector/url/StandardJdbcUrlParserTest.java @@ -37,12 +37,11 @@ void assertParseSimpleJdbcUrl() { @Test void assertParseMySQLJdbcUrl() { - JdbcUrl actual = new StandardJdbcUrlParser().parse("jdbc:mysql://127.0.0.1:3306/demo_ds?serverTimezone=UTC&useSSL=false&sessionVariables=group_concat_max_len=204800,SQL_SAFE_UPDATES=0"); + JdbcUrl actual = new StandardJdbcUrlParser().parse("jdbc:mysql://127.0.0.1:3306/demo_ds?useSSL=false&sessionVariables=group_concat_max_len=204800,SQL_SAFE_UPDATES=0"); assertThat(actual.getHostname(), is("127.0.0.1")); assertThat(actual.getPort(), is(3306)); assertThat(actual.getDatabase(), is("demo_ds")); - assertThat(actual.getQueryProperties().size(), is(3)); - assertThat(actual.getQueryProperties().get("serverTimezone"), is("UTC")); + assertThat(actual.getQueryProperties().size(), is(2)); assertThat(actual.getQueryProperties().get("useSSL"), is(Boolean.FALSE.toString())); assertThat(actual.getQueryProperties().get("sessionVariables"), is("group_concat_max_len=204800,SQL_SAFE_UPDATES=0")); } diff --git a/infra/database/type/mysql/src/main/java/org/apache/shardingsphere/infra/database/mysql/connector/MySQLConnectionPropertiesParser.java 
b/infra/database/type/mysql/src/main/java/org/apache/shardingsphere/infra/database/mysql/connector/MySQLConnectionPropertiesParser.java index ed0527a00307f..8a1770fd7232b 100644 --- a/infra/database/type/mysql/src/main/java/org/apache/shardingsphere/infra/database/mysql/connector/MySQLConnectionPropertiesParser.java +++ b/infra/database/type/mysql/src/main/java/org/apache/shardingsphere/infra/database/mysql/connector/MySQLConnectionPropertiesParser.java @@ -54,7 +54,6 @@ private Properties buildDefaultQueryProperties() { result.setProperty("netTimeoutForStreamingResults", "0"); result.setProperty("tinyInt1isBit", Boolean.FALSE.toString()); result.setProperty("useSSL", Boolean.FALSE.toString()); - result.setProperty("serverTimezone", "UTC"); result.setProperty("zeroDateTimeBehavior", "round"); return result; } diff --git a/infra/database/type/mysql/src/test/java/org/apache/shardingsphere/infra/database/mysql/connector/MySQLConnectionPropertiesParserTest.java b/infra/database/type/mysql/src/test/java/org/apache/shardingsphere/infra/database/mysql/connector/MySQLConnectionPropertiesParserTest.java index 4128f672f6255..9b348051b1b04 100644 --- a/infra/database/type/mysql/src/test/java/org/apache/shardingsphere/infra/database/mysql/connector/MySQLConnectionPropertiesParserTest.java +++ b/infra/database/type/mysql/src/test/java/org/apache/shardingsphere/infra/database/mysql/connector/MySQLConnectionPropertiesParserTest.java @@ -17,9 +17,9 @@ package org.apache.shardingsphere.infra.database.mysql.connector; +import org.apache.shardingsphere.infra.database.core.connector.ConnectionProperties; import org.apache.shardingsphere.infra.database.core.connector.ConnectionPropertiesParser; import org.apache.shardingsphere.infra.database.core.connector.url.UnrecognizedDatabaseURLException; -import org.apache.shardingsphere.infra.database.core.connector.ConnectionProperties; import org.apache.shardingsphere.infra.database.core.spi.DatabaseTypedSPILoader; import 
org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; @@ -65,8 +65,8 @@ private static class NewConstructorTestCaseArgumentsProvider implements Argument public Stream provideArguments(final ExtensionContext extensionContext) { return Stream.of( Arguments.of("simple", "jdbc:mysql://127.0.0.1/foo_ds", "127.0.0.1", 3306, "foo_ds", null, new Properties()), - Arguments.of("complex", "jdbc:mysql:loadbalance://127.0.0.1:9999,127.0.0.2:9999/foo_ds?serverTimezone=UTC&useSSL=false", "127.0.0.1", 9999, "foo_ds", null, - PropertiesBuilder.build(new Property("serverTimezone", "UTC"), new Property("useSSL", Boolean.FALSE.toString())))); + Arguments.of("complex", "jdbc:mysql:loadbalance://127.0.0.1:9999,127.0.0.2:9999/foo_ds?useSSL=false", "127.0.0.1", 9999, "foo_ds", null, + PropertiesBuilder.build(new Property("useSSL", Boolean.FALSE.toString())))); } } } diff --git a/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/ral/query/algorithm/AlgorithmMetaDataQueryResultRow.java b/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/ral/query/algorithm/AlgorithmMetaDataQueryResultRow.java new file mode 100644 index 0000000000000..f654cfe04d747 --- /dev/null +++ b/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/ral/query/algorithm/AlgorithmMetaDataQueryResultRow.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.distsql.handler.ral.query.algorithm; + +import org.apache.shardingsphere.infra.algorithm.ShardingSphereAlgorithm; +import org.apache.shardingsphere.infra.database.core.spi.DatabaseSupportedTypedSPI; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.merge.result.impl.local.LocalDataQueryResultRow; +import org.apache.shardingsphere.infra.spi.ShardingSphereServiceLoader; +import org.apache.shardingsphere.infra.spi.annotation.SPIDescription; + +import java.util.Collection; +import java.util.stream.Collectors; + +/** + * Algorithm meta data query result row. + */ +public final class AlgorithmMetaDataQueryResultRow { + + private final boolean containsDatabaseTypes; + + private final String type; + + private final String typeAliases; + + private final String supportedDatabaseTypes; + + private final String description; + + public AlgorithmMetaDataQueryResultRow(final ShardingSphereAlgorithm algorithm) { + containsDatabaseTypes = algorithm instanceof DatabaseSupportedTypedSPI; + type = String.valueOf(algorithm.getType()); + typeAliases = algorithm.getTypeAliases().stream().map(Object::toString).collect(Collectors.joining(",")); + supportedDatabaseTypes = containsDatabaseTypes + ? 
getSupportedDatabaseTypes(((DatabaseSupportedTypedSPI) algorithm).getSupportedDatabaseTypes()).stream().map(DatabaseType::getType).collect(Collectors.joining(",")) + : ""; + SPIDescription description = algorithm.getClass().getAnnotation(SPIDescription.class); + this.description = null == description ? "" : description.value(); + } + + private Collection getSupportedDatabaseTypes(final Collection supportedDatabaseTypes) { + return supportedDatabaseTypes.isEmpty() ? ShardingSphereServiceLoader.getServiceInstances(DatabaseType.class) : supportedDatabaseTypes; + } + + /** + * To local data query result row. + * + * @return local data query result row + */ + public LocalDataQueryResultRow toLocalDataQueryResultRow() { + return containsDatabaseTypes ? new LocalDataQueryResultRow(type, typeAliases, supportedDatabaseTypes, description) : new LocalDataQueryResultRow(type, typeAliases, description); + } +} diff --git a/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/ral/query/algorithm/AlgorithmMetaDataQueryResultRows.java b/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/ral/query/algorithm/AlgorithmMetaDataQueryResultRows.java new file mode 100644 index 0000000000000..a53b023c30b5a --- /dev/null +++ b/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/ral/query/algorithm/AlgorithmMetaDataQueryResultRows.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.distsql.handler.ral.query.algorithm; + +import org.apache.shardingsphere.infra.algorithm.ShardingSphereAlgorithm; +import org.apache.shardingsphere.infra.database.core.spi.DatabaseSupportedTypedSPI; +import org.apache.shardingsphere.infra.merge.result.impl.local.LocalDataQueryResultRow; +import org.apache.shardingsphere.infra.spi.ShardingSphereServiceLoader; + +import java.util.Arrays; +import java.util.Collection; +import java.util.stream.Collectors; + +/** + * Algorithm meta data query result rows. + */ +public final class AlgorithmMetaDataQueryResultRows { + + private final boolean containsDatabaseTypes; + + private final Collection rows; + + public AlgorithmMetaDataQueryResultRows(final Class algorithmClass) { + containsDatabaseTypes = DatabaseSupportedTypedSPI.class.isAssignableFrom(algorithmClass); + rows = ShardingSphereServiceLoader.getServiceInstances(algorithmClass).stream().map(AlgorithmMetaDataQueryResultRow::new).collect(Collectors.toList()); + } + + /** + * Get rows. + * + * @return rows + */ + public Collection getRows() { + return rows.stream().map(AlgorithmMetaDataQueryResultRow::toLocalDataQueryResultRow).collect(Collectors.toList()); + } + + /** + * Get column names. + * + * @return column names + */ + public Collection getColumnNames() { + return containsDatabaseTypes ? 
Arrays.asList("type", "type_aliases", "supported_database_types", "description") : Arrays.asList("type", "type_aliases", "description"); + } +} diff --git a/infra/pom.xml b/infra/pom.xml index 61cf88285b360..755c1f805ff2e 100644 --- a/infra/pom.xml +++ b/infra/pom.xml @@ -46,5 +46,6 @@ util reachability-metadata nativetest + algorithm diff --git a/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/connection/DriverDatabaseConnectionManagerTest.java b/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/connection/DriverDatabaseConnectionManagerTest.java index ef8909825be44..2ec99176ea4fd 100644 --- a/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/connection/DriverDatabaseConnectionManagerTest.java +++ b/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/connection/DriverDatabaseConnectionManagerTest.java @@ -113,7 +113,7 @@ private MetaDataPersistService mockMetaDataPersistService() { private Map createProperties() { Map result = new LinkedHashMap<>(3, 1F); - result.put("jdbcUrl", "jdbc:mysql://127.0.0.1:3306/demo_ds_0?serverTimezone=UTC&useSSL=false"); + result.put("jdbcUrl", "jdbc:mysql://127.0.0.1:3306/demo_ds_0?useSSL=false"); result.put("username", "root"); result.put("password", "123456"); return result; @@ -121,7 +121,7 @@ private Map createProperties() { private Map mockTrafficDataSourceMap() throws SQLException { MockedDataSource result = new MockedDataSource(mock(Connection.class, RETURNS_DEEP_STUBS)); - result.setUrl("jdbc:mysql://127.0.0.1:3307/logic_db?serverTimezone=UTC&useSSL=false"); + result.setUrl("jdbc:mysql://127.0.0.1:3307/logic_db?useSSL=false"); result.setUsername("root"); result.setPassword("123456"); when(result.getConnection().getMetaData().getURL()).thenReturn(result.getUrl()); @@ -165,7 +165,7 @@ void assertGetConnectionWhenConfigTrafficRule() throws SQLException { assertThat(actual, is(databaseConnectionManager.getConnections("127.0.0.1@3307", 0, 1, 
ConnectionMode.MEMORY_STRICTLY))); assertThat(actual.size(), is(1)); assertThat(actual.get(0).getMetaData().getUserName(), is("root")); - assertThat(actual.get(0).getMetaData().getURL(), is("jdbc:mysql://127.0.0.1:3307/logic_db?serverTimezone=UTC&useSSL=false")); + assertThat(actual.get(0).getMetaData().getURL(), is("jdbc:mysql://127.0.0.1:3307/logic_db?useSSL=false")); } @Test @@ -183,7 +183,7 @@ void assertGetConnectionsWhenConfigTrafficRuleAndAllInCache() throws SQLExceptio assertThat(actual.size(), is(1)); assertThat(actual.get(0), is(expected)); assertThat(actual.get(0).getMetaData().getUserName(), is("root")); - assertThat(actual.get(0).getMetaData().getURL(), is("jdbc:mysql://127.0.0.1:3307/logic_db?serverTimezone=UTC&useSSL=false")); + assertThat(actual.get(0).getMetaData().getURL(), is("jdbc:mysql://127.0.0.1:3307/logic_db?useSSL=false")); } @Test @@ -197,7 +197,7 @@ void assertGetConnectionsWhenConfigTrafficRuleAndEmptyCache() throws SQLExceptio List actual = databaseConnectionManager.getConnections("127.0.0.1@3307", 0, 1, ConnectionMode.MEMORY_STRICTLY); assertThat(actual.size(), is(1)); assertThat(actual.get(0).getMetaData().getUserName(), is("root")); - assertThat(actual.get(0).getMetaData().getURL(), is("jdbc:mysql://127.0.0.1:3307/logic_db?serverTimezone=UTC&useSSL=false")); + assertThat(actual.get(0).getMetaData().getURL(), is("jdbc:mysql://127.0.0.1:3307/logic_db?useSSL=false")); } @Test diff --git a/kernel/data-pipeline/api/src/test/java/org/apache/shardingsphere/data/pipeline/api/type/ShardingSpherePipelineDataSourceConfigurationTest.java b/kernel/data-pipeline/api/src/test/java/org/apache/shardingsphere/data/pipeline/api/type/ShardingSpherePipelineDataSourceConfigurationTest.java index ea77113d05343..792e0acbe08a8 100644 --- a/kernel/data-pipeline/api/src/test/java/org/apache/shardingsphere/data/pipeline/api/type/ShardingSpherePipelineDataSourceConfigurationTest.java +++ 
b/kernel/data-pipeline/api/src/test/java/org/apache/shardingsphere/data/pipeline/api/type/ShardingSpherePipelineDataSourceConfigurationTest.java @@ -75,11 +75,11 @@ private String getDataSourceYaml() { + " minPoolSize: 20\n" + " minimumIdle: 20\n" + " dataSourceClassName: com.zaxxer.hikari.HikariDataSource\n" - + " url: jdbc:mysql://192.168.0.2:3306/ds_1?serverTimezone=UTC&useSSL=false\n" + + " url: jdbc:mysql://192.168.0.2:3306/ds_1?useSSL=false\n" + " ds_0:\n" + " minPoolSize: 20\n" + " minimumIdle: 20\n" + " dataSourceClassName: com.zaxxer.hikari.HikariDataSource\n" - + " url: jdbc:mysql://192.168.0.1:3306/ds_0?serverTimezone=UTC&useSSL=false\n"; + + " url: jdbc:mysql://192.168.0.1:3306/ds_0?useSSL=false\n"; } } diff --git a/kernel/data-pipeline/api/src/test/java/org/apache/shardingsphere/data/pipeline/api/type/StandardPipelineDataSourceConfigurationTest.java b/kernel/data-pipeline/api/src/test/java/org/apache/shardingsphere/data/pipeline/api/type/StandardPipelineDataSourceConfigurationTest.java index b5bd9f6aa90d6..610f63f15886e 100644 --- a/kernel/data-pipeline/api/src/test/java/org/apache/shardingsphere/data/pipeline/api/type/StandardPipelineDataSourceConfigurationTest.java +++ b/kernel/data-pipeline/api/src/test/java/org/apache/shardingsphere/data/pipeline/api/type/StandardPipelineDataSourceConfigurationTest.java @@ -31,7 +31,7 @@ class StandardPipelineDataSourceConfigurationTest { - private static final String JDBC_URL = "jdbc:mysql://127.0.0.1:3306/demo_ds?serverTimezone=UTC&useSSL=false"; + private static final String JDBC_URL = "jdbc:mysql://127.0.0.1:3306/demo_ds?useSSL=false"; private static final String USERNAME = "userName"; diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/context/InventoryIncrementalJobItemContext.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/context/InventoryIncrementalJobItemContext.java index 8a2ca947905f9..ae0cd2f48c2a9 100644 --- 
a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/context/InventoryIncrementalJobItemContext.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/context/InventoryIncrementalJobItemContext.java @@ -17,9 +17,9 @@ package org.apache.shardingsphere.data.pipeline.common.context; -import org.apache.shardingsphere.data.pipeline.common.metadata.loader.PipelineTableMetaDataLoader; import org.apache.shardingsphere.data.pipeline.common.job.progress.InventoryIncrementalJobItemProgress; import org.apache.shardingsphere.data.pipeline.common.job.progress.listener.PipelineJobProgressListener; +import org.apache.shardingsphere.data.pipeline.common.metadata.loader.PipelineTableMetaDataLoader; import org.apache.shardingsphere.data.pipeline.core.importer.sink.PipelineSink; import org.apache.shardingsphere.data.pipeline.core.task.PipelineTask; @@ -88,4 +88,9 @@ public interface InventoryIncrementalJobItemContext extends PipelineJobItemConte * @return inventory records count */ long getInventoryRecordsCount(); + + @Override + default InventoryIncrementalJobItemProgress toProgress() { + return new InventoryIncrementalJobItemProgress(this); + } } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/context/PipelineJobItemContext.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/context/PipelineJobItemContext.java index ded73005aed39..154aa65943866 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/context/PipelineJobItemContext.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/context/PipelineJobItemContext.java @@ -19,6 +19,7 @@ import org.apache.shardingsphere.data.pipeline.common.config.job.PipelineJobConfiguration; import org.apache.shardingsphere.data.pipeline.common.job.JobStatus; +import 
org.apache.shardingsphere.data.pipeline.common.job.progress.PipelineJobItemProgress; /** * Pipeline job item context. @@ -85,4 +86,11 @@ public interface PipelineJobItemContext { * @return stopping */ boolean isStopping(); + + /** + * Convert to pipeline job item progress. + * + * @return converted pipeline job item progress + */ + PipelineJobItemProgress toProgress(); } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/ConsistencyCheckJobItemProgress.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/ConsistencyCheckJobItemProgress.java index f43db99d70f47..0a1ac665b29d1 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/ConsistencyCheckJobItemProgress.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/ConsistencyCheckJobItemProgress.java @@ -22,6 +22,7 @@ import lombok.Setter; import lombok.ToString; import org.apache.shardingsphere.data.pipeline.common.job.JobStatus; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.ConsistencyCheckJobItemProgressContext; import java.util.Map; @@ -34,9 +35,6 @@ // TODO Refactor structure, List public final class ConsistencyCheckJobItemProgress implements PipelineJobItemProgress { - @Setter - private JobStatus status = JobStatus.RUNNING; - private final String tableNames; private final String ignoredTableNames; @@ -54,4 +52,19 @@ public final class ConsistencyCheckJobItemProgress implements PipelineJobItemPro private final Map targetTableCheckPositions; private final String sourceDatabaseType; + + @Setter + private JobStatus status = JobStatus.RUNNING; + + public ConsistencyCheckJobItemProgress(final ConsistencyCheckJobItemProgressContext context) { + tableNames = String.join(",", context.getTableNames()); + ignoredTableNames = String.join(",", 
context.getIgnoredTableNames()); + checkedRecordsCount = context.getCheckedRecordsCount().get(); + recordsCount = context.getRecordsCount(); + checkBeginTimeMillis = context.getCheckBeginTimeMillis(); + checkEndTimeMillis = context.getCheckEndTimeMillis(); + sourceTableCheckPositions = context.getSourceTableCheckPositions(); + targetTableCheckPositions = context.getTargetTableCheckPositions(); + sourceDatabaseType = context.getSourceDatabaseType(); + } } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/InventoryIncrementalJobItemProgress.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/InventoryIncrementalJobItemProgress.java index 043b00b9f43ab..d0ffdb41df45a 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/InventoryIncrementalJobItemProgress.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/InventoryIncrementalJobItemProgress.java @@ -18,30 +18,62 @@ package org.apache.shardingsphere.data.pipeline.common.job.progress; import lombok.Getter; +import lombok.NoArgsConstructor; import lombok.Setter; +import org.apache.shardingsphere.data.pipeline.common.context.InventoryIncrementalJobItemContext; import org.apache.shardingsphere.data.pipeline.common.job.JobStatus; +import org.apache.shardingsphere.data.pipeline.common.task.progress.IncrementalTaskProgress; +import org.apache.shardingsphere.data.pipeline.common.task.progress.InventoryTaskProgress; +import org.apache.shardingsphere.data.pipeline.core.task.PipelineTask; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + /** * Inventory incremental job item progress. 
*/ +@NoArgsConstructor @Getter @Setter public final class InventoryIncrementalJobItemProgress implements PipelineJobItemProgress { - private JobStatus status = JobStatus.RUNNING; - private DatabaseType sourceDatabaseType; private String dataSourceName; - private boolean active; - private JobItemInventoryTasksProgress inventory; private JobItemIncrementalTasksProgress incremental; + private long inventoryRecordsCount; + private long processedRecordsCount; - private long inventoryRecordsCount; + private boolean active; + + private JobStatus status = JobStatus.RUNNING; + + public InventoryIncrementalJobItemProgress(final InventoryIncrementalJobItemContext context) { + sourceDatabaseType = context.getJobConfig().getSourceDatabaseType(); + dataSourceName = context.getDataSourceName(); + inventory = getInventoryTasksProgress(context.getInventoryTasks()); + incremental = getIncrementalTasksProgress(context.getIncrementalTasks()); + inventoryRecordsCount = context.getInventoryRecordsCount(); + processedRecordsCount = context.getProcessedRecordsCount(); + status = context.getStatus(); + } + + private JobItemIncrementalTasksProgress getIncrementalTasksProgress(final Collection incrementalTasks) { + return new JobItemIncrementalTasksProgress(incrementalTasks.isEmpty() ? 
null : (IncrementalTaskProgress) incrementalTasks.iterator().next().getTaskProgress()); + } + + private JobItemInventoryTasksProgress getInventoryTasksProgress(final Collection inventoryTasks) { + Map inventoryTaskProgressMap = new HashMap<>(); + for (PipelineTask each : inventoryTasks) { + inventoryTaskProgressMap.put(each.getTaskId(), (InventoryTaskProgress) each.getTaskProgress()); + } + return new JobItemInventoryTasksProgress(inventoryTaskProgressMap); + } } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/PipelineJobItemProgress.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/PipelineJobItemProgress.java index 4153e74073306..3c9770a369f1b 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/PipelineJobItemProgress.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/PipelineJobItemProgress.java @@ -30,4 +30,11 @@ public interface PipelineJobItemProgress { * @return job status */ JobStatus getStatus(); + + /** + * Set status. 
+ * + * @param jobStatus job status + */ + void setStatus(JobStatus jobStatus); } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlConsistencyCheckJobItemProgress.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlConsistencyCheckJobItemProgress.java index 2ed595408b3ad..72ea55e18654f 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlConsistencyCheckJobItemProgress.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlConsistencyCheckJobItemProgress.java @@ -19,7 +19,7 @@ import lombok.Getter; import lombok.Setter; -import org.apache.shardingsphere.infra.util.yaml.YamlConfiguration; +import org.apache.shardingsphere.data.pipeline.core.job.yaml.YamlPipelineJobItemProgressConfiguration; import java.util.LinkedHashMap; import java.util.Map; @@ -29,7 +29,7 @@ */ @Getter @Setter -public final class YamlConsistencyCheckJobItemProgress implements YamlConfiguration { +public final class YamlConsistencyCheckJobItemProgress implements YamlPipelineJobItemProgressConfiguration { private String status; diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlConsistencyCheckJobItemProgressSwapper.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlConsistencyCheckJobItemProgressSwapper.java index c399505edf2bb..eb582d1d89609 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlConsistencyCheckJobItemProgressSwapper.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlConsistencyCheckJobItemProgressSwapper.java @@ -19,12 +19,12 @@ import 
org.apache.shardingsphere.data.pipeline.common.job.JobStatus; import org.apache.shardingsphere.data.pipeline.common.job.progress.ConsistencyCheckJobItemProgress; -import org.apache.shardingsphere.infra.util.yaml.swapper.YamlConfigurationSwapper; +import org.apache.shardingsphere.data.pipeline.core.job.yaml.YamlPipelineJobItemProgressSwapper; /** * YAML data check job item progress swapper. */ -public final class YamlConsistencyCheckJobItemProgressSwapper implements YamlConfigurationSwapper { +public final class YamlConsistencyCheckJobItemProgressSwapper implements YamlPipelineJobItemProgressSwapper { @Override public YamlConsistencyCheckJobItemProgress swapToYamlConfiguration(final ConsistencyCheckJobItemProgress data) { @@ -50,4 +50,9 @@ public ConsistencyCheckJobItemProgress swapToObject(final YamlConsistencyCheckJo result.setStatus(JobStatus.valueOf(yamlConfig.getStatus())); return result; } + + @Override + public Class getYamlProgressClass() { + return YamlConsistencyCheckJobItemProgress.class; + } } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlInventoryIncrementalJobItemProgress.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlInventoryIncrementalJobItemProgress.java index e73596ce66766..1e44dcf8984bb 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlInventoryIncrementalJobItemProgress.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlInventoryIncrementalJobItemProgress.java @@ -19,14 +19,14 @@ import lombok.Getter; import lombok.Setter; -import org.apache.shardingsphere.infra.util.yaml.YamlConfiguration; +import org.apache.shardingsphere.data.pipeline.core.job.yaml.YamlPipelineJobItemProgressConfiguration; /** * YAML inventory incremental job item progress. 
*/ @Getter @Setter -public final class YamlInventoryIncrementalJobItemProgress implements YamlConfiguration { +public final class YamlInventoryIncrementalJobItemProgress implements YamlPipelineJobItemProgressConfiguration { private String status; diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlInventoryIncrementalJobItemProgressSwapper.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlInventoryIncrementalJobItemProgressSwapper.java index 696a4e5f34eb4..1b29d4f52de0d 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlInventoryIncrementalJobItemProgressSwapper.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlInventoryIncrementalJobItemProgressSwapper.java @@ -19,14 +19,14 @@ import org.apache.shardingsphere.data.pipeline.common.job.JobStatus; import org.apache.shardingsphere.data.pipeline.common.job.progress.InventoryIncrementalJobItemProgress; +import org.apache.shardingsphere.data.pipeline.core.job.yaml.YamlPipelineJobItemProgressSwapper; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; -import org.apache.shardingsphere.infra.util.yaml.swapper.YamlConfigurationSwapper; /** * YAML inventory incremental job item progress swapper. 
*/ -public final class YamlInventoryIncrementalJobItemProgressSwapper implements YamlConfigurationSwapper { +public final class YamlInventoryIncrementalJobItemProgressSwapper implements YamlPipelineJobItemProgressSwapper { private final YamlJobItemInventoryTasksProgressSwapper inventoryTasksProgressSwapper = new YamlJobItemInventoryTasksProgressSwapper(); @@ -57,4 +57,9 @@ public InventoryIncrementalJobItemProgress swapToObject(final YamlInventoryIncre result.setInventoryRecordsCount(yamlProgress.getInventoryRecordsCount()); return result; } + + @Override + public Class getYamlProgressClass() { + return YamlInventoryIncrementalJobItemProgress.class; + } } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/DataConsistencyCheckUtils.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/DataConsistencyCheckUtils.java index adb79c3255405..0aa0844db660e 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/DataConsistencyCheckUtils.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/DataConsistencyCheckUtils.java @@ -65,9 +65,20 @@ public static boolean recordsEquals(final Map thisRecord, final return true; } + /** + * Whether column values are matched or not. 
+ * + * @param equalsBuilder equals builder + * @param thisColumnValue this column value + * @param thatColumnValue that column value + * @return true if matched, otherwise false + */ @SneakyThrows(SQLException.class) - private static boolean isMatched(final EqualsBuilder equalsBuilder, final Object thisColumnValue, final Object thatColumnValue) { + public static boolean isMatched(final EqualsBuilder equalsBuilder, final Object thisColumnValue, final Object thatColumnValue) { equalsBuilder.reset(); + if (isInteger(thisColumnValue) && isInteger(thatColumnValue)) { + return isIntegerEquals((Number) thisColumnValue, (Number) thatColumnValue); + } if (thisColumnValue instanceof SQLXML && thatColumnValue instanceof SQLXML) { return ((SQLXML) thisColumnValue).getString().equals(((SQLXML) thatColumnValue).getString()); } @@ -80,6 +91,17 @@ private static boolean isMatched(final EqualsBuilder equalsBuilder, final Object return equalsBuilder.append(thisColumnValue, thatColumnValue).isEquals(); } + private static boolean isInteger(final Object value) { + if (!(value instanceof Number)) { + return false; + } + return value instanceof Long || value instanceof Integer || value instanceof Short || value instanceof Byte; + } + + private static boolean isIntegerEquals(final Number one, final Number another) { + return one.longValue() == another.longValue(); + } + /** * Check two BigDecimal whether equals or not. 
* diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/RecordSingleTableInventoryCalculatedResult.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/RecordSingleTableInventoryCalculatedResult.java index 1db10f4352b37..8bd104982f04d 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/RecordSingleTableInventoryCalculatedResult.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/RecordSingleTableInventoryCalculatedResult.java @@ -26,7 +26,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Optional; /** @@ -66,12 +65,12 @@ public boolean equals(final Object o) { return false; } final RecordSingleTableInventoryCalculatedResult that = (RecordSingleTableInventoryCalculatedResult) o; - if (recordsCount != that.recordsCount || !Objects.equals(maxUniqueKeyValue, that.maxUniqueKeyValue)) { + EqualsBuilder equalsBuilder = new EqualsBuilder(); + if (recordsCount != that.recordsCount || !DataConsistencyCheckUtils.isMatched(equalsBuilder, maxUniqueKeyValue, that.maxUniqueKeyValue)) { log.warn("Record count or max unique key value not match, recordCount1={}, recordCount2={}, maxUniqueKeyValue1={}, maxUniqueKeyValue2={}.", recordsCount, that.recordsCount, maxUniqueKeyValue, that.maxUniqueKeyValue); return false; } - EqualsBuilder equalsBuilder = new EqualsBuilder(); Iterator> thisRecordsIterator = records.iterator(); Iterator> thatRecordsIterator = that.records.iterator(); while (thisRecordsIterator.hasNext() && thatRecordsIterator.hasNext()) { diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/TableDataConsistencyChecker.java 
b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/TableDataConsistencyChecker.java index 010e504f66d3a..ddd078b460184 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/TableDataConsistencyChecker.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/TableDataConsistencyChecker.java @@ -18,14 +18,12 @@ package org.apache.shardingsphere.data.pipeline.core.consistencycheck.table; import org.apache.shardingsphere.infra.algorithm.ShardingSphereAlgorithm; -import org.apache.shardingsphere.infra.database.core.type.DatabaseType; - -import java.util.Collection; +import org.apache.shardingsphere.infra.database.core.spi.DatabaseSupportedTypedSPI; /** * Table data consistency checker. */ -public interface TableDataConsistencyChecker extends ShardingSphereAlgorithm, AutoCloseable { +public interface TableDataConsistencyChecker extends ShardingSphereAlgorithm, DatabaseSupportedTypedSPI, AutoCloseable { /** * Build table inventory checker. @@ -44,13 +42,6 @@ default boolean isBreakOnInventoryCheckNotMatched() { return true; } - /** - * Get supported database types. 
- * - * @return supported database types - */ - Collection getSupportedDatabaseTypes(); - @Override void close(); } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/AbstractSimplePipelineJob.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/AbstractSimplePipelineJob.java index 197c09e2d4948..36899a266c824 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/AbstractSimplePipelineJob.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/AbstractSimplePipelineJob.java @@ -20,6 +20,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.shardingsphere.data.pipeline.common.context.PipelineJobItemContext; import org.apache.shardingsphere.data.pipeline.core.exception.job.PipelineJobNotFoundException; +import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobItemManager; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobManager; import org.apache.shardingsphere.data.pipeline.core.task.runner.PipelineTasksRunner; import org.apache.shardingsphere.elasticjob.api.ShardingContext; @@ -48,6 +49,7 @@ protected AbstractSimplePipelineJob(final String jobId) { @Override public void execute(final ShardingContext shardingContext) { PipelineJobManager jobManager = new PipelineJobManager(getJobAPI()); + PipelineJobItemManager jobItemManager = new PipelineJobItemManager<>(getJobAPI().getYamlJobItemProgressSwapper()); String jobId = shardingContext.getJobName(); int shardingItem = shardingContext.getShardingItem(); log.info("Execute job {}-{}", jobId, shardingItem); @@ -57,31 +59,31 @@ public void execute(final ShardingContext shardingContext) { } try { PipelineJobItemContext jobItemContext = buildPipelineJobItemContext(shardingContext); - execute0(jobManager, jobItemContext); + execute0(jobItemManager, jobItemContext); // CHECKSTYLE:OFF } catch (final 
RuntimeException ex) { // CHECKSTYLE:ON - processFailed(jobManager, jobId, shardingItem, ex); + processFailed(jobManager, jobItemManager, jobId, shardingItem, ex); throw ex; } } - private void execute0(final PipelineJobManager jobManager, final PipelineJobItemContext jobItemContext) { + private void execute0(final PipelineJobItemManager jobItemManager, final PipelineJobItemContext jobItemContext) { String jobId = jobItemContext.getJobId(); int shardingItem = jobItemContext.getShardingItem(); PipelineTasksRunner tasksRunner = buildPipelineTasksRunner(jobItemContext); if (!addTasksRunner(shardingItem, tasksRunner)) { return; } - jobManager.cleanJobItemErrorMessage(jobId, shardingItem); + jobItemManager.cleanErrorMessage(jobId, shardingItem); prepare(jobItemContext); log.info("start tasks runner, jobId={}, shardingItem={}", jobId, shardingItem); tasksRunner.start(); } - private void processFailed(final PipelineJobManager jobManager, final String jobId, final int shardingItem, final Exception ex) { + private void processFailed(final PipelineJobManager jobManager, final PipelineJobItemManager jobItemManager, final String jobId, final int shardingItem, final Exception ex) { log.error("job execution failed, {}-{}", jobId, shardingItem, ex); - jobManager.updateJobItemErrorMessage(jobId, shardingItem, ex); + jobItemManager.updateErrorMessage(jobId, shardingItem, ex); try { jobManager.stop(jobId); } catch (final PipelineJobNotFoundException ignored) { diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/progress/persist/PipelineJobProgressPersistService.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/progress/persist/PipelineJobProgressPersistService.java index 478cd13bb6c93..143ba94c50b6e 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/progress/persist/PipelineJobProgressPersistService.java +++ 
b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/progress/persist/PipelineJobProgressPersistService.java @@ -24,6 +24,7 @@ import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobCenter; import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobAPI; +import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobItemManager; import org.apache.shardingsphere.infra.executor.kernel.thread.ExecutorThreadFactoryBuilder; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; @@ -129,7 +130,8 @@ private static synchronized void persist(final String jobId, final int shardingI } persistContext.getHasNewEvents().set(false); long startTimeMillis = System.currentTimeMillis(); - TypedSPILoader.getService(PipelineJobAPI.class, PipelineJobIdUtils.parseJobType(jobId).getType()).updateJobItemProgress(jobItemContext.get()); + new PipelineJobItemManager<>(TypedSPILoader.getService(PipelineJobAPI.class, PipelineJobIdUtils.parseJobType(jobId).getType()) + .getYamlJobItemProgressSwapper()).updateProgress(jobItemContext.get()); persistContext.getBeforePersistingProgressMillis().set(null); if (6 == ThreadLocalRandom.current().nextInt(100)) { log.info("persist, jobId={}, shardingItem={}, cost {} ms", jobId, shardingItem, System.currentTimeMillis() - startTimeMillis); diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/InventoryIncrementalJobAPI.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/InventoryIncrementalJobAPI.java index 487555edcb51a..76a66b00e6b60 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/InventoryIncrementalJobAPI.java +++ 
b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/InventoryIncrementalJobAPI.java @@ -22,27 +22,25 @@ import org.apache.shardingsphere.data.pipeline.common.config.process.PipelineProcessConfiguration; import org.apache.shardingsphere.data.pipeline.common.context.InventoryIncrementalProcessContext; import org.apache.shardingsphere.data.pipeline.common.context.PipelineContextKey; -import org.apache.shardingsphere.data.pipeline.common.job.progress.InventoryIncrementalJobItemProgress; -import org.apache.shardingsphere.data.pipeline.common.job.progress.JobOffsetInfo; -import org.apache.shardingsphere.data.pipeline.common.pojo.DataConsistencyCheckAlgorithmInfo; -import org.apache.shardingsphere.data.pipeline.common.pojo.InventoryIncrementalJobItemInfo; +import org.apache.shardingsphere.data.pipeline.common.job.progress.yaml.YamlInventoryIncrementalJobItemProgressSwapper; import org.apache.shardingsphere.data.pipeline.common.pojo.PipelineJobInfo; import org.apache.shardingsphere.data.pipeline.core.consistencycheck.ConsistencyCheckJobItemProgressContext; import org.apache.shardingsphere.data.pipeline.core.consistencycheck.PipelineDataConsistencyChecker; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; import org.apache.shardingsphere.data.pipeline.core.task.config.PipelineTaskConfiguration; import java.sql.SQLException; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.Optional; /** * Inventory incremental job API. */ public interface InventoryIncrementalJobAPI extends PipelineJobAPI { + @SuppressWarnings("unchecked") + @Override + default YamlInventoryIncrementalJobItemProgressSwapper getYamlJobItemProgressSwapper() { + return new YamlInventoryIncrementalJobItemProgressSwapper(); + } + /** * Get pipeline job info. 
* @@ -77,64 +75,6 @@ public interface InventoryIncrementalJobAPI extends PipelineJobAPI { */ void extendYamlJobConfiguration(PipelineContextKey contextKey, YamlPipelineJobConfiguration yamlJobConfig); - /** - * Alter process configuration. - * - * @param contextKey context key - * @param processConfig process configuration - */ - void alterProcessConfiguration(PipelineContextKey contextKey, PipelineProcessConfiguration processConfig); - - /** - * Show process configuration. - * - * @param contextKey context key - * @return process configuration, non-null - */ - PipelineProcessConfiguration showProcessConfiguration(PipelineContextKey contextKey); - - /** - * Persist job offset info. - * - * @param jobId job ID - * @param jobOffsetInfo job offset info - */ - void persistJobOffsetInfo(String jobId, JobOffsetInfo jobOffsetInfo); - - /** - * Get job offset info. - * - * @param jobId job ID - * @return job offset progress - */ - JobOffsetInfo getJobOffsetInfo(String jobId); - - /** - * Get job progress. - * - * @param pipelineJobConfig job configuration - * @return each sharding item progress - */ - Map getJobProgress(PipelineJobConfiguration pipelineJobConfig); - - @Override - Optional getJobItemProgress(String jobId, int shardingItem); - - /** - * Get job infos. - * - * @param jobId job ID - * @return job item infos - */ - List getJobItemInfos(String jobId); - - /** - * List all data consistency check algorithms from SPI. - * - * @return data consistency check algorithms - */ - Collection listDataConsistencyCheckAlgorithms(); - /** * Build pipeline data consistency checker. * @@ -146,15 +86,6 @@ public interface InventoryIncrementalJobAPI extends PipelineJobAPI { PipelineDataConsistencyChecker buildPipelineDataConsistencyChecker(PipelineJobConfiguration pipelineJobConfig, InventoryIncrementalProcessContext processContext, ConsistencyCheckJobItemProgressContext progressContext); - /** - * Aggregate data consistency check results. 
- * - * @param jobId job ID - * @param checkResults check results - * @return check success or not - */ - boolean aggregateDataConsistencyCheckResults(String jobId, Map checkResults); - /** * Commit pipeline job. * diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/InventoryIncrementalJobManager.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/InventoryIncrementalJobManager.java new file mode 100644 index 0000000000000..2ef78b7e8bf25 --- /dev/null +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/InventoryIncrementalJobManager.java @@ -0,0 +1,160 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.data.pipeline.core.job.service; + +import com.google.common.base.Preconditions; +import lombok.RequiredArgsConstructor; +import org.apache.shardingsphere.data.pipeline.common.config.job.PipelineJobConfiguration; +import org.apache.shardingsphere.data.pipeline.common.config.process.PipelineProcessConfiguration; +import org.apache.shardingsphere.data.pipeline.common.config.process.PipelineProcessConfigurationUtils; +import org.apache.shardingsphere.data.pipeline.common.context.PipelineContextKey; +import org.apache.shardingsphere.data.pipeline.common.job.JobStatus; +import org.apache.shardingsphere.data.pipeline.common.job.progress.InventoryIncrementalJobItemProgress; +import org.apache.shardingsphere.data.pipeline.common.job.progress.JobOffsetInfo; +import org.apache.shardingsphere.data.pipeline.common.job.progress.yaml.YamlJobOffsetInfo; +import org.apache.shardingsphere.data.pipeline.common.job.progress.yaml.YamlJobOffsetInfoSwapper; +import org.apache.shardingsphere.data.pipeline.common.pojo.InventoryIncrementalJobItemInfo; +import org.apache.shardingsphere.data.pipeline.common.pojo.TableBasedPipelineJobInfo; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils; +import org.apache.shardingsphere.data.pipeline.core.metadata.PipelineProcessConfigurationPersistService; +import org.apache.shardingsphere.elasticjob.infra.pojo.JobConfigurationPOJO; +import org.apache.shardingsphere.infra.util.yaml.YamlEngine; + +import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.stream.IntStream; + +/** + * Inventory incremental job manager. 
+ */ +@RequiredArgsConstructor +public final class InventoryIncrementalJobManager { + + private final InventoryIncrementalJobAPI jobAPI; + + private final PipelineProcessConfigurationPersistService processConfigPersistService = new PipelineProcessConfigurationPersistService(); + + /** + * Alter process configuration. + * + * @param contextKey context key + * @param processConfig process configuration + */ + public void alterProcessConfiguration(final PipelineContextKey contextKey, final PipelineProcessConfiguration processConfig) { + // TODO check rateLimiter type match or not + processConfigPersistService.persist(contextKey, jobAPI.getType(), processConfig); + } + + /** + * Show process configuration. + * + * @param contextKey context key + * @return process configuration, non-null + */ + public PipelineProcessConfiguration showProcessConfiguration(final PipelineContextKey contextKey) { + return PipelineProcessConfigurationUtils.convertWithDefaultValue(processConfigPersistService.load(contextKey, jobAPI.getType())); + } + + /** + * Get job item infos.
+ * + * @param jobId job ID + * @return job item infos + */ + public List getJobItemInfos(final String jobId) { + PipelineJobConfiguration jobConfig = new PipelineJobManager(jobAPI).getJobConfiguration(jobId); + long startTimeMillis = Long.parseLong(Optional.ofNullable(PipelineJobIdUtils.getElasticJobConfigurationPOJO(jobId).getProps().getProperty("start_time_millis")).orElse("0")); + Map jobProgress = getJobProgress(jobConfig); + List result = new LinkedList<>(); + PipelineJobItemManager jobItemManager = new PipelineJobItemManager<>(jobAPI.getYamlJobItemProgressSwapper()); + for (Entry entry : jobProgress.entrySet()) { + int shardingItem = entry.getKey(); + TableBasedPipelineJobInfo jobInfo = (TableBasedPipelineJobInfo) jobAPI.getJobInfo(jobId); + InventoryIncrementalJobItemProgress jobItemProgress = entry.getValue(); + String errorMessage = jobItemManager.getErrorMessage(jobId, shardingItem); + if (null == jobItemProgress) { + result.add(new InventoryIncrementalJobItemInfo(shardingItem, jobInfo.getTable(), null, startTimeMillis, 0, errorMessage)); + continue; + } + int inventoryFinishedPercentage = 0; + if (JobStatus.EXECUTE_INCREMENTAL_TASK == jobItemProgress.getStatus() || JobStatus.FINISHED == jobItemProgress.getStatus()) { + inventoryFinishedPercentage = 100; + } else if (0 != jobItemProgress.getProcessedRecordsCount() && 0 != jobItemProgress.getInventoryRecordsCount()) { + inventoryFinishedPercentage = (int) Math.min(100, jobItemProgress.getProcessedRecordsCount() * 100 / jobItemProgress.getInventoryRecordsCount()); + } + result.add(new InventoryIncrementalJobItemInfo(shardingItem, jobInfo.getTable(), jobItemProgress, startTimeMillis, inventoryFinishedPercentage, errorMessage)); + } + return result; + } + + /** + * Get job progress. 
+ * + * @param jobConfig pipeline job configuration + * @return each sharding item progress + */ + public Map getJobProgress(final PipelineJobConfiguration jobConfig) { + PipelineJobItemManager jobItemManager = new PipelineJobItemManager<>(jobAPI.getYamlJobItemProgressSwapper()); + String jobId = jobConfig.getJobId(); + JobConfigurationPOJO jobConfigPOJO = PipelineJobIdUtils.getElasticJobConfigurationPOJO(jobId); + return IntStream.range(0, jobConfig.getJobShardingCount()).boxed().collect(LinkedHashMap::new, (map, each) -> { + Optional jobItemProgress = jobItemManager.getProgress(jobId, each); + jobItemProgress.ifPresent(optional -> optional.setActive(!jobConfigPOJO.isDisabled())); + map.put(each, jobItemProgress.orElse(null)); + }, LinkedHashMap::putAll); + } + + /** + * Persist job offset info. + * + * @param jobId job ID + * @param jobOffsetInfo job offset info + */ + public void persistJobOffsetInfo(final String jobId, final JobOffsetInfo jobOffsetInfo) { + String value = YamlEngine.marshal(new YamlJobOffsetInfoSwapper().swapToYamlConfiguration(jobOffsetInfo)); + PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobId)).persistJobOffsetInfo(jobId, value); + } + + /** + * Get job offset info. + * + * @param jobId job ID + * @return job offset progress + */ + public JobOffsetInfo getJobOffsetInfo(final String jobId) { + Optional offsetInfo = PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobId)).getJobOffsetInfo(jobId); + return new YamlJobOffsetInfoSwapper().swapToObject(offsetInfo.isPresent() ? YamlEngine.unmarshal(offsetInfo.get(), YamlJobOffsetInfo.class) : new YamlJobOffsetInfo()); + } + + /** + * Aggregate data consistency check results. 
+ * + * @param jobId job ID + * @param checkResults check results + * @return check success or not + */ + public boolean aggregateDataConsistencyCheckResults(final String jobId, final Map checkResults) { + Preconditions.checkArgument(!checkResults.isEmpty(), "checkResults empty, jobId:", jobId); + return checkResults.values().stream().allMatch(TableDataConsistencyCheckResult::isMatched); + } +} diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/PipelineJobAPI.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/PipelineJobAPI.java index 4ac59eefcf944..87ae49520dcc0 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/PipelineJobAPI.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/PipelineJobAPI.java @@ -17,12 +17,11 @@ package org.apache.shardingsphere.data.pipeline.core.job.service; -import org.apache.shardingsphere.data.pipeline.common.config.job.PipelineJobConfiguration; -import org.apache.shardingsphere.data.pipeline.common.context.PipelineJobItemContext; -import org.apache.shardingsphere.data.pipeline.common.job.JobStatus; import org.apache.shardingsphere.data.pipeline.common.job.PipelineJob; import org.apache.shardingsphere.data.pipeline.common.job.progress.PipelineJobItemProgress; -import org.apache.shardingsphere.elasticjob.infra.pojo.JobConfigurationPOJO; +import org.apache.shardingsphere.data.pipeline.core.job.yaml.YamlPipelineJobConfigurationSwapper; +import org.apache.shardingsphere.data.pipeline.core.job.yaml.YamlPipelineJobItemProgressConfiguration; +import org.apache.shardingsphere.data.pipeline.core.job.yaml.YamlPipelineJobItemProgressSwapper; import org.apache.shardingsphere.infra.spi.annotation.SingletonSPI; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPI; @@ -35,12 +34,19 @@ public interface PipelineJobAPI 
extends TypedSPI { /** - * Get job configuration. - * - * @param jobConfigPOJO job configuration POJO - * @return pipeline job configuration + * Get YAML pipeline job configuration swapper. + * + * @return YAML pipeline job configuration swapper + */ + YamlPipelineJobConfigurationSwapper getYamlJobConfigurationSwapper(); + + /** + * Get YAML pipeline job item progress swapper. + * + * @param type of pipeline job item progress + * @return YAML pipeline job item progress swapper */ - PipelineJobConfiguration getJobConfiguration(JobConfigurationPOJO jobConfigPOJO); + YamlPipelineJobItemProgressSwapper getYamlJobItemProgressSwapper(); /** * Whether to ignore to start disabled job when job item progress is finished. @@ -69,44 +75,12 @@ default Optional getToBeStoppedPreviousJobType() { return Optional.empty(); } - /** - * Persist job item progress. - * - * @param jobItemContext job item context - */ - void persistJobItemProgress(PipelineJobItemContext jobItemContext); - - /** - * Update job item progress. - * - * @param jobItemContext job item context - */ - void updateJobItemProgress(PipelineJobItemContext jobItemContext); - - /** - * Get job item progress. - * - * @param jobId job id - * @param shardingItem sharding item - * @return job item progress, may be null - */ - Optional getJobItemProgress(String jobId, int shardingItem); - - /** - * Update job item status. - * - * @param jobId job id - * @param shardingItem sharding item - * @param status status - */ - void updateJobItemStatus(String jobId, int shardingItem, JobStatus status); - /** * Get pipeline job class. 
* * @return pipeline job class */ - Class getPipelineJobClass(); + Class getJobClass(); @Override String getType(); diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/PipelineJobItemManager.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/PipelineJobItemManager.java new file mode 100644 index 0000000000000..9aa4a01cbb38e --- /dev/null +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/PipelineJobItemManager.java @@ -0,0 +1,137 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.data.pipeline.core.job.service; + +import org.apache.commons.lang3.exception.ExceptionUtils; +import org.apache.shardingsphere.data.pipeline.common.context.PipelineJobItemContext; +import org.apache.shardingsphere.data.pipeline.common.job.JobStatus; +import org.apache.shardingsphere.data.pipeline.common.job.progress.PipelineJobItemProgress; +import org.apache.shardingsphere.data.pipeline.common.metadata.node.PipelineMetaDataNode; +import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils; +import org.apache.shardingsphere.data.pipeline.core.job.yaml.YamlPipelineJobItemProgressConfiguration; +import org.apache.shardingsphere.data.pipeline.core.job.yaml.YamlPipelineJobItemProgressSwapper; +import org.apache.shardingsphere.infra.util.yaml.YamlEngine; + +import java.util.Optional; + +/** + * Pipeline job item manager. + * + * @param type of pipeline job item progress + */ +public final class PipelineJobItemManager { + + private final YamlPipelineJobItemProgressSwapper swapper; + + @SuppressWarnings({"rawtypes", "unchecked"}) + public PipelineJobItemManager(final YamlPipelineJobItemProgressSwapper swapper) { + this.swapper = swapper; + } + + /** + * Update job item status. + * + * @param jobId job id + * @param shardingItem sharding item + * @param status status + */ + public void updateStatus(final String jobId, final int shardingItem, final JobStatus status) { + Optional jobItemProgress = getProgress(jobId, shardingItem); + if (!jobItemProgress.isPresent()) { + return; + } + jobItemProgress.get().setStatus(status); + PipelineAPIFactory.getGovernanceRepositoryAPI( + PipelineJobIdUtils.parseContextKey(jobId)).updateJobItemProgress(jobId, shardingItem, YamlEngine.marshal(swapper.swapToYamlConfiguration(jobItemProgress.get()))); + } + + /** + * Get job item progress.
+ * + * @param jobId job id + * @param shardingItem sharding item + * @return job item progress + */ + public Optional getProgress(final String jobId, final int shardingItem) { + return PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobId)).getJobItemProgress(jobId, shardingItem) + .map(optional -> swapper.swapToObject(YamlEngine.unmarshal(optional, swapper.getYamlProgressClass(), true))); + } + + /** + * Persist job item progress. + * + * @param jobItemContext job item context + */ + public void persistProgress(final PipelineJobItemContext jobItemContext) { + PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobItemContext.getJobId())) + .persistJobItemProgress(jobItemContext.getJobId(), jobItemContext.getShardingItem(), convertProgressYamlContent(jobItemContext)); + } + + /** + * Update job item progress. + * + * @param jobItemContext job item context + */ + public void updateProgress(final PipelineJobItemContext jobItemContext) { + PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobItemContext.getJobId())) + .updateJobItemProgress(jobItemContext.getJobId(), jobItemContext.getShardingItem(), convertProgressYamlContent(jobItemContext)); + } + + @SuppressWarnings("unchecked") + private String convertProgressYamlContent(final PipelineJobItemContext jobItemContext) { + return YamlEngine.marshal(swapper.swapToYamlConfiguration((T) jobItemContext.toProgress())); + } + + /** + * Get job item error message. + * + * @param jobId job id + * @param shardingItem sharding item + * @return error message, empty string when absent + */ + public String getErrorMessage(final String jobId, final int shardingItem) { + return Optional.ofNullable(PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobId)).getJobItemErrorMessage(jobId, shardingItem)).orElse(""); + } + + /** + * Update job item error message.
+ * + * @param jobId job id + * @param shardingItem sharding item + * @param error error + */ + public void updateErrorMessage(final String jobId, final int shardingItem, final Object error) { + String key = PipelineMetaDataNode.getJobItemErrorMessagePath(jobId, shardingItem); + String value = ""; + if (null != error) { + value = error instanceof Throwable ? ExceptionUtils.getStackTrace((Throwable) error) : error.toString(); + } + PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobId)).update(key, value); + } + + /** + * Clean job item error message. + * + * @param jobId job id + * @param shardingItem sharding item + */ + public void cleanErrorMessage(final String jobId, final int shardingItem) { + String key = PipelineMetaDataNode.getJobItemErrorMessagePath(jobId, shardingItem); + PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobId)).persist(key, ""); + } +} diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/PipelineJobManager.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/PipelineJobManager.java index 47860f1400c53..b01ce97c1f25c 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/PipelineJobManager.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/PipelineJobManager.java @@ -19,7 +19,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.shardingsphere.data.pipeline.common.config.job.PipelineJobConfiguration; import org.apache.shardingsphere.data.pipeline.common.context.PipelineContextKey; import org.apache.shardingsphere.data.pipeline.common.job.JobStatus; @@ -32,7 +31,6 @@ import 
org.apache.shardingsphere.data.pipeline.core.exception.job.PipelineJobHasAlreadyStartedException; import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils; import org.apache.shardingsphere.elasticjob.infra.pojo.JobConfigurationPOJO; -import org.apache.shardingsphere.elasticjob.lite.lifecycle.domain.JobBriefInfo; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; @@ -44,7 +42,6 @@ import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; -import java.util.stream.Stream; /** * Pipeline job manager. @@ -55,7 +52,19 @@ public final class PipelineJobManager { private static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); - private final PipelineJobAPI pipelineJobAPI; + private final PipelineJobAPI jobAPI; + + /** + * Get job configuration. + * + * @param jobId job ID + * @param type of pipeline job configuration + * @return pipeline job configuration + */ + @SuppressWarnings("unchecked") + public T getJobConfiguration(final String jobId) { + return (T) jobAPI.getYamlJobConfigurationSwapper().swapToObject(PipelineJobIdUtils.getElasticJobConfigurationPOJO(jobId).getJobParameter()); + } /** * Start job. 
@@ -72,7 +81,7 @@ public Optional start(final PipelineJobConfiguration jobConfig) { log.warn("jobId already exists in registry center, ignore, jobConfigKey={}", jobConfigKey); return Optional.of(jobId); } - repositoryAPI.persist(PipelineMetaDataNode.getJobRootPath(jobId), pipelineJobAPI.getPipelineJobClass().getName()); + repositoryAPI.persist(PipelineMetaDataNode.getJobRootPath(jobId), jobAPI.getJobClass().getName()); repositoryAPI.persist(jobConfigKey, YamlEngine.marshal(jobConfig.convertToJobConfigurationPOJO())); return Optional.of(jobId); } @@ -83,15 +92,15 @@ public Optional start(final PipelineJobConfiguration jobConfig) { * @param jobId job id */ public void startDisabledJob(final String jobId) { - if (pipelineJobAPI.isIgnoreToStartDisabledJobWhenJobItemProgressIsFinished()) { - Optional jobItemProgress = pipelineJobAPI.getJobItemProgress(jobId, 0); + if (jobAPI.isIgnoreToStartDisabledJobWhenJobItemProgressIsFinished()) { + Optional jobItemProgress = new PipelineJobItemManager<>(jobAPI.getYamlJobItemProgressSwapper()).getProgress(jobId, 0); if (jobItemProgress.isPresent() && JobStatus.FINISHED == jobItemProgress.get().getStatus()) { log.info("job status is FINISHED, ignore, jobId={}", jobId); return; } } startCurrentDisabledJob(jobId); - pipelineJobAPI.getToBeStartDisabledNextJobType().ifPresent(optional -> startNextDisabledJob(jobId, optional)); + jobAPI.getToBeStartDisabledNextJobType().ifPresent(optional -> startNextDisabledJob(jobId, optional)); } @@ -129,7 +138,7 @@ private void startNextDisabledJob(final String jobId, final String toBeStartDisa * @param jobId job id */ public void stop(final String jobId) { - pipelineJobAPI.getToBeStoppedPreviousJobType().ifPresent(optional -> stopPreviousJob(jobId, optional)); + jobAPI.getToBeStoppedPreviousJobType().ifPresent(optional -> stopPreviousJob(jobId, optional)); stopCurrentJob(jobId); } @@ -178,53 +187,12 @@ public void drop(final String jobId) { * @param contextKey context key * @return jobs info */ - 
public List getPipelineJobInfos(final PipelineContextKey contextKey) { - if (pipelineJobAPI instanceof InventoryIncrementalJobAPI) { - return getJobBriefInfos(contextKey, pipelineJobAPI.getType()).map(each -> ((InventoryIncrementalJobAPI) pipelineJobAPI).getJobInfo(each.getJobName())).collect(Collectors.toList()); + public List getJobInfos(final PipelineContextKey contextKey) { + if (jobAPI instanceof InventoryIncrementalJobAPI) { + return PipelineAPIFactory.getJobStatisticsAPI(contextKey).getAllJobsBriefInfo().stream() + .filter(each -> !each.getJobName().startsWith("_") && jobAPI.getType().equals(PipelineJobIdUtils.parseJobType(each.getJobName()).getType())) + .map(each -> ((InventoryIncrementalJobAPI) jobAPI).getJobInfo(each.getJobName())).collect(Collectors.toList()); } return Collections.emptyList(); } - - private Stream getJobBriefInfos(final PipelineContextKey contextKey, final String jobType) { - return PipelineAPIFactory.getJobStatisticsAPI(contextKey).getAllJobsBriefInfo().stream().filter(each -> !each.getJobName().startsWith("_")) - .filter(each -> jobType.equals(PipelineJobIdUtils.parseJobType(each.getJobName()).getType())); - } - - /** - * Get job item error message. - * - * @param jobId job id - * @param shardingItem sharding item - * @return map, key is sharding item, value is error message - */ - public String getJobItemErrorMessage(final String jobId, final int shardingItem) { - return Optional.ofNullable(PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobId)).getJobItemErrorMessage(jobId, shardingItem)).orElse(""); - } - - /** - * Update job item error message. 
- * - * @param jobId job id - * @param shardingItem sharding item - * @param error error - */ - public void updateJobItemErrorMessage(final String jobId, final int shardingItem, final Object error) { - String key = PipelineMetaDataNode.getJobItemErrorMessagePath(jobId, shardingItem); - String value = ""; - if (null != error) { - value = error instanceof Throwable ? ExceptionUtils.getStackTrace((Throwable) error) : error.toString(); - } - PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobId)).update(key, value); - } - - /** - * Clean job item error message. - * - * @param jobId job id - * @param shardingItem sharding item - */ - public void cleanJobItemErrorMessage(final String jobId, final int shardingItem) { - String key = PipelineMetaDataNode.getJobItemErrorMessagePath(jobId, shardingItem); - PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobId)).persist(key, ""); - } } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/impl/AbstractInventoryIncrementalJobAPIImpl.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/impl/AbstractInventoryIncrementalJobAPIImpl.java deleted file mode 100644 index bfc7e8e9fd416..0000000000000 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/impl/AbstractInventoryIncrementalJobAPIImpl.java +++ /dev/null @@ -1,229 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.data.pipeline.core.job.service.impl; - -import lombok.AccessLevel; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; -import org.apache.shardingsphere.data.pipeline.common.config.job.PipelineJobConfiguration; -import org.apache.shardingsphere.data.pipeline.common.config.process.PipelineProcessConfiguration; -import org.apache.shardingsphere.data.pipeline.common.config.process.PipelineProcessConfigurationUtils; -import org.apache.shardingsphere.data.pipeline.common.context.InventoryIncrementalJobItemContext; -import org.apache.shardingsphere.data.pipeline.common.context.PipelineContextKey; -import org.apache.shardingsphere.data.pipeline.common.context.PipelineJobItemContext; -import org.apache.shardingsphere.data.pipeline.common.job.JobStatus; -import org.apache.shardingsphere.data.pipeline.common.job.progress.InventoryIncrementalJobItemProgress; -import org.apache.shardingsphere.data.pipeline.common.job.progress.JobItemIncrementalTasksProgress; -import org.apache.shardingsphere.data.pipeline.common.job.progress.JobItemInventoryTasksProgress; -import org.apache.shardingsphere.data.pipeline.common.job.progress.JobOffsetInfo; -import org.apache.shardingsphere.data.pipeline.common.job.progress.yaml.YamlInventoryIncrementalJobItemProgress; -import org.apache.shardingsphere.data.pipeline.common.job.progress.yaml.YamlInventoryIncrementalJobItemProgressSwapper; -import org.apache.shardingsphere.data.pipeline.common.job.progress.yaml.YamlJobOffsetInfo; -import 
org.apache.shardingsphere.data.pipeline.common.job.progress.yaml.YamlJobOffsetInfoSwapper; -import org.apache.shardingsphere.data.pipeline.common.pojo.DataConsistencyCheckAlgorithmInfo; -import org.apache.shardingsphere.data.pipeline.common.pojo.InventoryIncrementalJobItemInfo; -import org.apache.shardingsphere.data.pipeline.common.pojo.TableBasedPipelineJobInfo; -import org.apache.shardingsphere.data.pipeline.common.task.progress.IncrementalTaskProgress; -import org.apache.shardingsphere.data.pipeline.common.task.progress.InventoryTaskProgress; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableDataConsistencyChecker; -import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils; -import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobAPI; -import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineAPIFactory; -import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobManager; -import org.apache.shardingsphere.data.pipeline.core.metadata.PipelineProcessConfigurationPersistService; -import org.apache.shardingsphere.data.pipeline.core.task.PipelineTask; -import org.apache.shardingsphere.elasticjob.infra.pojo.JobConfigurationPOJO; -import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.spi.ShardingSphereServiceLoader; -import org.apache.shardingsphere.infra.spi.annotation.SPIDescription; -import org.apache.shardingsphere.infra.util.yaml.YamlEngine; - -import java.util.Collection; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Optional; -import java.util.stream.Collectors; -import java.util.stream.IntStream; - -/** - * Abstract inventory incremental job API 
implementation. - */ -@Slf4j -public abstract class AbstractInventoryIncrementalJobAPIImpl implements InventoryIncrementalJobAPI { - - private final PipelineProcessConfigurationPersistService processConfigPersistService = new PipelineProcessConfigurationPersistService(); - - @Getter(AccessLevel.PROTECTED) - private final YamlInventoryIncrementalJobItemProgressSwapper jobItemProgressSwapper = new YamlInventoryIncrementalJobItemProgressSwapper(); - - private final YamlJobOffsetInfoSwapper jobOffsetInfoSwapper = new YamlJobOffsetInfoSwapper(); - - @Override - public void alterProcessConfiguration(final PipelineContextKey contextKey, final PipelineProcessConfiguration processConfig) { - // TODO check rateLimiter type match or not - processConfigPersistService.persist(contextKey, getType(), processConfig); - } - - @Override - public PipelineProcessConfiguration showProcessConfiguration(final PipelineContextKey contextKey) { - return PipelineProcessConfigurationUtils.convertWithDefaultValue(processConfigPersistService.load(contextKey, getType())); - } - - @Override - public Map getJobProgress(final PipelineJobConfiguration jobConfig) { - String jobId = jobConfig.getJobId(); - JobConfigurationPOJO jobConfigPOJO = PipelineJobIdUtils.getElasticJobConfigurationPOJO(jobId); - return IntStream.range(0, jobConfig.getJobShardingCount()).boxed().collect(LinkedHashMap::new, (map, each) -> { - Optional jobItemProgress = getJobItemProgress(jobId, each); - jobItemProgress.ifPresent(optional -> optional.setActive(!jobConfigPOJO.isDisabled())); - map.put(each, jobItemProgress.orElse(null)); - }, LinkedHashMap::putAll); - } - - @Override - public List getJobItemInfos(final String jobId) { - JobConfigurationPOJO jobConfigPOJO = PipelineJobIdUtils.getElasticJobConfigurationPOJO(jobId); - PipelineJobConfiguration jobConfig = getJobConfiguration(jobConfigPOJO); - long startTimeMillis = 
Long.parseLong(Optional.ofNullable(jobConfigPOJO.getProps().getProperty("start_time_millis")).orElse("0")); - Map jobProgress = getJobProgress(jobConfig); - List result = new LinkedList<>(); - PipelineJobManager pipelineJobManager = new PipelineJobManager(this); - for (Entry entry : jobProgress.entrySet()) { - int shardingItem = entry.getKey(); - TableBasedPipelineJobInfo jobInfo = (TableBasedPipelineJobInfo) getJobInfo(jobId); - InventoryIncrementalJobItemProgress jobItemProgress = entry.getValue(); - String errorMessage = pipelineJobManager.getJobItemErrorMessage(jobId, shardingItem); - if (null == jobItemProgress) { - result.add(new InventoryIncrementalJobItemInfo(shardingItem, jobInfo.getTable(), null, startTimeMillis, 0, errorMessage)); - continue; - } - int inventoryFinishedPercentage = 0; - if (JobStatus.EXECUTE_INCREMENTAL_TASK == jobItemProgress.getStatus() || JobStatus.FINISHED == jobItemProgress.getStatus()) { - inventoryFinishedPercentage = 100; - } else if (0 != jobItemProgress.getProcessedRecordsCount() && 0 != jobItemProgress.getInventoryRecordsCount()) { - inventoryFinishedPercentage = (int) Math.min(100, jobItemProgress.getProcessedRecordsCount() * 100 / jobItemProgress.getInventoryRecordsCount()); - } - result.add(new InventoryIncrementalJobItemInfo(shardingItem, jobInfo.getTable(), jobItemProgress, startTimeMillis, inventoryFinishedPercentage, errorMessage)); - } - return result; - } - - @Override - public void persistJobItemProgress(final PipelineJobItemContext jobItemContext) { - PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobItemContext.getJobId())) - .persistJobItemProgress(jobItemContext.getJobId(), jobItemContext.getShardingItem(), convertJobItemProgress(jobItemContext)); - } - - private String convertJobItemProgress(final PipelineJobItemContext jobItemContext) { - InventoryIncrementalJobItemContext context = (InventoryIncrementalJobItemContext) jobItemContext; - InventoryIncrementalJobItemProgress 
jobItemProgress = new InventoryIncrementalJobItemProgress(); - jobItemProgress.setStatus(context.getStatus()); - jobItemProgress.setSourceDatabaseType(context.getJobConfig().getSourceDatabaseType()); - jobItemProgress.setDataSourceName(context.getDataSourceName()); - jobItemProgress.setIncremental(getIncrementalTasksProgress(context.getIncrementalTasks())); - jobItemProgress.setInventory(getInventoryTasksProgress(context.getInventoryTasks())); - jobItemProgress.setProcessedRecordsCount(context.getProcessedRecordsCount()); - jobItemProgress.setInventoryRecordsCount(context.getInventoryRecordsCount()); - return YamlEngine.marshal(jobItemProgressSwapper.swapToYamlConfiguration(jobItemProgress)); - } - - @Override - public void updateJobItemProgress(final PipelineJobItemContext jobItemContext) { - PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobItemContext.getJobId())) - .updateJobItemProgress(jobItemContext.getJobId(), jobItemContext.getShardingItem(), convertJobItemProgress(jobItemContext)); - } - - private JobItemIncrementalTasksProgress getIncrementalTasksProgress(final Collection incrementalTasks) { - return new JobItemIncrementalTasksProgress(incrementalTasks.isEmpty() ? 
null : (IncrementalTaskProgress) incrementalTasks.iterator().next().getTaskProgress()); - } - - private JobItemInventoryTasksProgress getInventoryTasksProgress(final Collection inventoryTasks) { - Map inventoryTaskProgressMap = new HashMap<>(); - for (PipelineTask each : inventoryTasks) { - inventoryTaskProgressMap.put(each.getTaskId(), (InventoryTaskProgress) each.getTaskProgress()); - } - return new JobItemInventoryTasksProgress(inventoryTaskProgressMap); - } - - @Override - public void persistJobOffsetInfo(final String jobId, final JobOffsetInfo jobOffsetInfo) { - String value = YamlEngine.marshal(jobOffsetInfoSwapper.swapToYamlConfiguration(jobOffsetInfo)); - PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobId)).persistJobOffsetInfo(jobId, value); - } - - @Override - public JobOffsetInfo getJobOffsetInfo(final String jobId) { - Optional offsetInfo = PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobId)).getJobOffsetInfo(jobId); - if (offsetInfo.isPresent()) { - YamlJobOffsetInfo info = YamlEngine.unmarshal(offsetInfo.get(), YamlJobOffsetInfo.class); - return jobOffsetInfoSwapper.swapToObject(info); - } - return jobOffsetInfoSwapper.swapToObject(new YamlJobOffsetInfo()); - } - - @Override - public Optional getJobItemProgress(final String jobId, final int shardingItem) { - Optional progress = PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobId)).getJobItemProgress(jobId, shardingItem); - return progress.map(optional -> jobItemProgressSwapper.swapToObject(YamlEngine.unmarshal(optional, YamlInventoryIncrementalJobItemProgress.class))); - } - - @Override - public void updateJobItemStatus(final String jobId, final int shardingItem, final JobStatus status) { - Optional jobItemProgress = getJobItemProgress(jobId, shardingItem); - if (!jobItemProgress.isPresent()) { - return; - } - jobItemProgress.get().setStatus(status); - 
PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobId)).updateJobItemProgress(jobId, shardingItem, - YamlEngine.marshal(jobItemProgressSwapper.swapToYamlConfiguration(jobItemProgress.get()))); - } - - @Override - public Collection listDataConsistencyCheckAlgorithms() { - Collection result = new LinkedList<>(); - for (TableDataConsistencyChecker each : ShardingSphereServiceLoader.getServiceInstances(TableDataConsistencyChecker.class)) { - SPIDescription description = each.getClass().getAnnotation(SPIDescription.class); - String typeAliases = each.getTypeAliases().stream().map(Object::toString).collect(Collectors.joining(",")); - result.add(new DataConsistencyCheckAlgorithmInfo(each.getType(), typeAliases, getSupportedDatabaseTypes(each.getSupportedDatabaseTypes()), null == description ? "" : description.value())); - } - return result; - } - - private Collection getSupportedDatabaseTypes(final Collection supportedDatabaseTypes) { - return supportedDatabaseTypes.isEmpty() ? 
ShardingSphereServiceLoader.getServiceInstances(DatabaseType.class) : supportedDatabaseTypes; - } - - @Override - public boolean aggregateDataConsistencyCheckResults(final String jobId, final Map checkResults) { - if (checkResults.isEmpty()) { - throw new IllegalArgumentException("checkResults empty, jobId:" + jobId); - } - for (Entry entry : checkResults.entrySet()) { - TableDataConsistencyCheckResult checkResult = entry.getValue(); - if (!checkResult.isMatched()) { - return false; - } - } - return true; - } -} diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/yaml/YamlPipelineJobConfigurationSwapper.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/yaml/YamlPipelineJobConfigurationSwapper.java new file mode 100644 index 0000000000000..4482fb831fd91 --- /dev/null +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/yaml/YamlPipelineJobConfigurationSwapper.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.data.pipeline.core.job.yaml; + +import org.apache.shardingsphere.data.pipeline.common.config.job.PipelineJobConfiguration; +import org.apache.shardingsphere.infra.util.yaml.YamlConfiguration; +import org.apache.shardingsphere.infra.util.yaml.swapper.YamlConfigurationSwapper; + +/** + * YAML pipeline job configuration swapper. + * + * @param type of YAML configuration + * @param type of swapped pipeline job configuration + */ +public interface YamlPipelineJobConfigurationSwapper extends YamlConfigurationSwapper { + + /** + * Swap to job configuration from text. + * + * @param jobParam job parameter + * @return job configuration + */ + T swapToObject(String jobParam); +} diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/yaml/YamlPipelineJobItemProgressConfiguration.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/yaml/YamlPipelineJobItemProgressConfiguration.java new file mode 100644 index 0000000000000..1a1ef05cbdb0d --- /dev/null +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/yaml/YamlPipelineJobItemProgressConfiguration.java @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.data.pipeline.core.job.yaml; + +import org.apache.shardingsphere.infra.util.yaml.YamlConfiguration; + +/** + * YAML pipeline job item progress configuration. + */ +public interface YamlPipelineJobItemProgressConfiguration extends YamlConfiguration { +} diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/yaml/YamlPipelineJobItemProgressSwapper.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/yaml/YamlPipelineJobItemProgressSwapper.java new file mode 100644 index 0000000000000..4f803634df99c --- /dev/null +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/yaml/YamlPipelineJobItemProgressSwapper.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.data.pipeline.core.job.yaml; + +import org.apache.shardingsphere.data.pipeline.common.job.progress.PipelineJobItemProgress; +import org.apache.shardingsphere.infra.util.yaml.swapper.YamlConfigurationSwapper; + +/** + * YAML pipeline job configuration swapper. + * + * @param type of YAML pipeline job item progress configuration + * @param type of swapped pipeline job item progress + */ +public interface YamlPipelineJobItemProgressSwapper extends YamlConfigurationSwapper { + + /** + * Get YAML pipeline job item progress configuration class. + * + * @return YAML pipeline job item progress configuration class + */ + Class getYamlProgressClass(); +} diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/task/runner/InventoryIncrementalTasksRunner.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/task/runner/InventoryIncrementalTasksRunner.java index 1923ad14911f3..3935bec810603 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/task/runner/InventoryIncrementalTasksRunner.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/task/runner/InventoryIncrementalTasksRunner.java @@ -25,15 +25,17 @@ import org.apache.shardingsphere.data.pipeline.common.execute.ExecuteEngine; import org.apache.shardingsphere.data.pipeline.common.ingest.position.FinishedPosition; import org.apache.shardingsphere.data.pipeline.common.job.JobStatus; +import org.apache.shardingsphere.data.pipeline.common.job.progress.InventoryIncrementalJobItemProgress; import org.apache.shardingsphere.data.pipeline.core.exception.job.PipelineJobNotFoundException; -import org.apache.shardingsphere.data.pipeline.core.job.progress.persist.PipelineJobProgressPersistService; -import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobManager; -import 
org.apache.shardingsphere.infra.util.close.QuietlyCloser; import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils; import org.apache.shardingsphere.data.pipeline.core.job.progress.PipelineJobProgressDetector; +import org.apache.shardingsphere.data.pipeline.core.job.progress.persist.PipelineJobProgressPersistService; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobAPI; +import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobItemManager; +import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobManager; import org.apache.shardingsphere.data.pipeline.core.task.PipelineTask; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; +import org.apache.shardingsphere.infra.util.close.QuietlyCloser; import java.util.Collection; import java.util.LinkedList; @@ -57,12 +59,15 @@ public class InventoryIncrementalTasksRunner implements PipelineTasksRunner { private final PipelineJobManager jobManager; + private final PipelineJobItemManager jobItemManager; + public InventoryIncrementalTasksRunner(final InventoryIncrementalJobItemContext jobItemContext) { this.jobItemContext = jobItemContext; inventoryTasks = jobItemContext.getInventoryTasks(); incrementalTasks = jobItemContext.getIncrementalTasks(); jobAPI = TypedSPILoader.getService(PipelineJobAPI.class, PipelineJobIdUtils.parseJobType(jobItemContext.getJobId()).getType()); jobManager = new PipelineJobManager(jobAPI); + jobItemManager = new PipelineJobItemManager<>(jobAPI.getYamlJobItemProgressSwapper()); } @Override @@ -83,7 +88,8 @@ public void start() { if (jobItemContext.isStopping()) { return; } - TypedSPILoader.getService(PipelineJobAPI.class, PipelineJobIdUtils.parseJobType(jobItemContext.getJobId()).getType()).persistJobItemProgress(jobItemContext); + new PipelineJobItemManager<>(TypedSPILoader.getService(PipelineJobAPI.class, PipelineJobIdUtils.parseJobType(jobItemContext.getJobId()).getType()) + 
.getYamlJobItemProgressSwapper()).persistProgress(jobItemContext); if (PipelineJobProgressDetector.isAllInventoryTasksFinished(inventoryTasks)) { log.info("All inventory tasks finished."); executeIncrementalTask(); @@ -106,7 +112,7 @@ private synchronized void executeInventoryTask() { private void updateLocalAndRemoteJobItemStatus(final JobStatus jobStatus) { jobItemContext.setStatus(jobStatus); - jobAPI.updateJobItemStatus(jobItemContext.getJobId(), jobItemContext.getShardingItem(), jobStatus); + jobItemManager.updateStatus(jobItemContext.getJobId(), jobItemContext.getShardingItem(), jobStatus); } private synchronized void executeIncrementalTask() { @@ -146,7 +152,7 @@ protected void inventorySuccessCallback() { protected void inventoryFailureCallback(final Throwable throwable) { log.error("onFailure, inventory task execute failed.", throwable); String jobId = jobItemContext.getJobId(); - jobManager.updateJobItemErrorMessage(jobId, jobItemContext.getShardingItem(), throwable); + jobItemManager.updateErrorMessage(jobId, jobItemContext.getShardingItem(), throwable); try { jobManager.stop(jobId); } catch (final PipelineJobNotFoundException ignored) { @@ -181,7 +187,7 @@ public void onSuccess() { public void onFailure(final Throwable throwable) { log.error("onFailure, incremental task execute failed.", throwable); String jobId = jobItemContext.getJobId(); - jobManager.updateJobItemErrorMessage(jobId, jobItemContext.getShardingItem(), throwable); + jobItemManager.updateErrorMessage(jobId, jobItemContext.getShardingItem(), throwable); try { jobManager.stop(jobId); } catch (final PipelineJobNotFoundException ignored) { diff --git a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/metadata/CaseInsensitiveQualifiedTableTest.java b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/metadata/CaseInsensitiveQualifiedTableTest.java new file mode 100644 index 0000000000000..c554dbfc998f3 --- /dev/null +++ 
b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/metadata/CaseInsensitiveQualifiedTableTest.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.data.pipeline.common.metadata; + +import org.junit.jupiter.api.Test; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertThrows; + +class CaseInsensitiveQualifiedTableTest { + + @Test + void assertToString() { + CaseInsensitiveQualifiedTable actual = new CaseInsensitiveQualifiedTable(new CaseInsensitiveIdentifier(null), new CaseInsensitiveIdentifier("t_order")); + assertThat(actual.toString(), is("t_order")); + } + + @Test + void assertToStringThrowsNPE() { + CaseInsensitiveQualifiedTable actual = new CaseInsensitiveQualifiedTable(null, new CaseInsensitiveIdentifier("t_order")); + assertThrows(NullPointerException.class, actual::toString); + } +} diff --git a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/DataConsistencyCheckUtilsTest.java 
b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/DataConsistencyCheckUtilsTest.java index 13a4fbba35e92..01676117059a8 100644 --- a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/DataConsistencyCheckUtilsTest.java +++ b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/DataConsistencyCheckUtilsTest.java @@ -17,6 +17,7 @@ package org.apache.shardingsphere.data.pipeline.core.consistencycheck; +import org.apache.commons.lang3.builder.EqualsBuilder; import org.junit.jupiter.api.Test; import java.math.BigDecimal; @@ -25,6 +26,16 @@ class DataConsistencyCheckUtilsTest { + @Test + void assertIsIntegerEquals() { + EqualsBuilder equalsBuilder = new EqualsBuilder(); + String value = "123"; + Long longValue = Long.parseLong(value); + assertTrue(DataConsistencyCheckUtils.isMatched(equalsBuilder, longValue, Integer.parseInt(value))); + assertTrue(DataConsistencyCheckUtils.isMatched(equalsBuilder, longValue, Short.parseShort(value))); + assertTrue(DataConsistencyCheckUtils.isMatched(equalsBuilder, longValue, Byte.parseByte(value))); + } + @Test void assertIsBigDecimalEquals() { BigDecimal one = BigDecimal.valueOf(3322, 1); diff --git a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/cdc/distsql/handler/query/ShowStreamingJobStatusExecutor.java b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/cdc/distsql/handler/query/ShowStreamingJobStatusExecutor.java index 87fab7477a4b1..60bed768d2257 100644 --- a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/cdc/distsql/handler/query/ShowStreamingJobStatusExecutor.java +++ b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/cdc/distsql/handler/query/ShowStreamingJobStatusExecutor.java @@ -22,6 +22,7 @@ import 
org.apache.shardingsphere.data.pipeline.common.job.progress.InventoryIncrementalJobItemProgress; import org.apache.shardingsphere.data.pipeline.common.pojo.InventoryIncrementalJobItemInfo; import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobAPI; +import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobManager; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobAPI; import org.apache.shardingsphere.distsql.handler.ral.query.QueryableRALExecutor; import org.apache.shardingsphere.infra.merge.result.impl.local.LocalDataQueryResultRow; @@ -41,7 +42,7 @@ public final class ShowStreamingJobStatusExecutor implements QueryableRALExecuto @Override public Collection getRows(final ShowStreamingStatusStatement sqlStatement) { InventoryIncrementalJobAPI jobAPI = (InventoryIncrementalJobAPI) TypedSPILoader.getService(PipelineJobAPI.class, new CDCJobType().getType()); - List jobItemInfos = jobAPI.getJobItemInfos(sqlStatement.getJobId()); + List jobItemInfos = new InventoryIncrementalJobManager(jobAPI).getJobItemInfos(sqlStatement.getJobId()); long currentTimeMillis = System.currentTimeMillis(); return jobItemInfos.stream().map(each -> generateResultRow(each, currentTimeMillis)).collect(Collectors.toList()); } diff --git a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/cdc/distsql/handler/query/ShowStreamingListExecutor.java b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/cdc/distsql/handler/query/ShowStreamingListExecutor.java index f6ca1ea172a3f..c5c64bb35ffa4 100644 --- a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/cdc/distsql/handler/query/ShowStreamingListExecutor.java +++ b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/cdc/distsql/handler/query/ShowStreamingListExecutor.java @@ -40,7 +40,7 @@ public final class ShowStreamingListExecutor implements 
QueryableRALExecutor getRows(final ShowStreamingListStatement sqlStatement) { - return pipelineJobManager.getPipelineJobInfos(new PipelineContextKey(InstanceType.PROXY)).stream().map(each -> new LocalDataQueryResultRow(each.getJobMetaData().getJobId(), + return pipelineJobManager.getJobInfos(new PipelineContextKey(InstanceType.PROXY)).stream().map(each -> new LocalDataQueryResultRow(each.getJobMetaData().getJobId(), ((TableBasedPipelineJobInfo) each).getDatabaseName(), ((TableBasedPipelineJobInfo) each).getTable(), each.getJobMetaData().getJobItemCount(), each.getJobMetaData().isActive() ? Boolean.TRUE.toString() : Boolean.FALSE.toString(), each.getJobMetaData().getCreateTime(), Optional.ofNullable(each.getJobMetaData().getStopTime()).orElse(""))).collect(Collectors.toList()); diff --git a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/cdc/distsql/handler/query/ShowStreamingRuleExecutor.java b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/cdc/distsql/handler/query/ShowStreamingRuleExecutor.java index f646e82611bde..78a02f6ea1f9c 100644 --- a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/cdc/distsql/handler/query/ShowStreamingRuleExecutor.java +++ b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/cdc/distsql/handler/query/ShowStreamingRuleExecutor.java @@ -21,6 +21,7 @@ import org.apache.shardingsphere.data.pipeline.common.config.process.PipelineProcessConfiguration; import org.apache.shardingsphere.data.pipeline.common.context.PipelineContextKey; import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobAPI; +import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobManager; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobAPI; import org.apache.shardingsphere.distsql.handler.ral.query.QueryableRALExecutor; import 
org.apache.shardingsphere.infra.instance.metadata.InstanceType; @@ -39,7 +40,7 @@ public final class ShowStreamingRuleExecutor implements QueryableRALExecutor getRows(final ShowStreamingRuleStatement sqlStatement) { - PipelineProcessConfiguration processConfig = ((InventoryIncrementalJobAPI) TypedSPILoader.getService(PipelineJobAPI.class, "STREAMING")) + PipelineProcessConfiguration processConfig = new InventoryIncrementalJobManager((InventoryIncrementalJobAPI) TypedSPILoader.getService(PipelineJobAPI.class, "STREAMING")) .showProcessConfiguration(new PipelineContextKey(InstanceType.PROXY)); Collection result = new LinkedList<>(); result.add(new LocalDataQueryResultRow(getString(processConfig.getRead()), getString(processConfig.getWrite()), getString(processConfig.getStreamChannel()))); diff --git a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckAlgorithmsExecutor.java b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckAlgorithmsExecutor.java index 6b99ab543536c..dcc64f1c91ef4 100644 --- a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckAlgorithmsExecutor.java +++ b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckAlgorithmsExecutor.java @@ -17,35 +17,29 @@ package org.apache.shardingsphere.migration.distsql.handler.query; -import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobAPI; -import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobAPI; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableDataConsistencyChecker; import org.apache.shardingsphere.distsql.handler.ral.query.QueryableRALExecutor; -import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import 
org.apache.shardingsphere.distsql.handler.ral.query.algorithm.AlgorithmMetaDataQueryResultRows; import org.apache.shardingsphere.infra.merge.result.impl.local.LocalDataQueryResultRow; -import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.migration.distsql.statement.ShowMigrationCheckAlgorithmsStatement; -import java.util.Arrays; import java.util.Collection; -import java.util.stream.Collectors; /** * Show migration check algorithms' executor. */ public final class ShowMigrationCheckAlgorithmsExecutor implements QueryableRALExecutor { + private final AlgorithmMetaDataQueryResultRows algorithmMetaDataQueryResultRows = new AlgorithmMetaDataQueryResultRows(TableDataConsistencyChecker.class); + @Override public Collection getRows(final ShowMigrationCheckAlgorithmsStatement sqlStatement) { - InventoryIncrementalJobAPI jobAPI = (InventoryIncrementalJobAPI) TypedSPILoader.getService(PipelineJobAPI.class, "MIGRATION"); - return jobAPI.listDataConsistencyCheckAlgorithms().stream().map( - each -> new LocalDataQueryResultRow(each.getType(), each.getTypeAliases(), - each.getSupportedDatabaseTypes().stream().map(DatabaseType::getType).collect(Collectors.joining(",")), each.getDescription())) - .collect(Collectors.toList()); + return algorithmMetaDataQueryResultRows.getRows(); } @Override public Collection getColumnNames() { - return Arrays.asList("type", "type_aliases", "supported_database_types", "description"); + return algorithmMetaDataQueryResultRows.getColumnNames(); } @Override diff --git a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationJobStatusExecutor.java b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationJobStatusExecutor.java index b92d9efb73e4c..79684dc0eb1f4 100644 --- 
a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationJobStatusExecutor.java +++ b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationJobStatusExecutor.java @@ -20,6 +20,7 @@ import org.apache.shardingsphere.data.pipeline.common.job.progress.InventoryIncrementalJobItemProgress; import org.apache.shardingsphere.data.pipeline.common.pojo.InventoryIncrementalJobItemInfo; import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobAPI; +import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobManager; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobAPI; import org.apache.shardingsphere.distsql.handler.ral.query.QueryableRALExecutor; import org.apache.shardingsphere.infra.merge.result.impl.local.LocalDataQueryResultRow; @@ -40,7 +41,7 @@ public final class ShowMigrationJobStatusExecutor implements QueryableRALExecuto @Override public Collection getRows(final ShowMigrationStatusStatement sqlStatement) { InventoryIncrementalJobAPI jobAPI = (InventoryIncrementalJobAPI) TypedSPILoader.getService(PipelineJobAPI.class, "MIGRATION"); - List jobItemInfos = jobAPI.getJobItemInfos(sqlStatement.getJobId()); + List jobItemInfos = new InventoryIncrementalJobManager(jobAPI).getJobItemInfos(sqlStatement.getJobId()); long currentTimeMillis = System.currentTimeMillis(); return jobItemInfos.stream().map(each -> generateResultRow(each, currentTimeMillis)).collect(Collectors.toList()); } diff --git a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationListExecutor.java b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationListExecutor.java index a731a088a97e4..ce1877a57197d 100644 --- 
a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationListExecutor.java +++ b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationListExecutor.java @@ -39,7 +39,7 @@ public final class ShowMigrationListExecutor implements QueryableRALExecutor getRows(final ShowMigrationListStatement sqlStatement) { - return pipelineJobManager.getPipelineJobInfos(new PipelineContextKey(InstanceType.PROXY)).stream().map(each -> new LocalDataQueryResultRow(each.getJobMetaData().getJobId(), + return pipelineJobManager.getJobInfos(new PipelineContextKey(InstanceType.PROXY)).stream().map(each -> new LocalDataQueryResultRow(each.getJobMetaData().getJobId(), ((TableBasedPipelineJobInfo) each).getTable(), each.getJobMetaData().getJobItemCount(), each.getJobMetaData().isActive() ? Boolean.TRUE.toString() : Boolean.FALSE.toString(), each.getJobMetaData().getCreateTime(), each.getJobMetaData().getStopTime())).collect(Collectors.toList()); diff --git a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/update/CheckMigrationJobUpdater.java b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/update/CheckMigrationJobUpdater.java index 547d6255f4b9f..1bf18932eccfe 100644 --- a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/update/CheckMigrationJobUpdater.java +++ b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/update/CheckMigrationJobUpdater.java @@ -18,8 +18,9 @@ package org.apache.shardingsphere.migration.distsql.handler.update; import org.apache.shardingsphere.data.pipeline.core.exception.param.PipelineInvalidParameterException; -import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils; import 
org.apache.shardingsphere.data.pipeline.core.job.progress.PipelineJobProgressDetector; +import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobManager; +import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobManager; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.api.impl.ConsistencyCheckJobAPI; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.api.pojo.CreateConsistencyCheckJobParameter; import org.apache.shardingsphere.data.pipeline.scenario.migration.api.impl.MigrationJobAPI; @@ -47,13 +48,14 @@ public void executeUpdate(final String databaseName, final CheckMigrationStateme String algorithmTypeName = null == typeStrategy ? null : typeStrategy.getName(); Properties algorithmProps = null == typeStrategy ? null : typeStrategy.getProps(); String jobId = sqlStatement.getJobId(); - MigrationJobConfiguration jobConfig = migrationJobAPI.getJobConfiguration(PipelineJobIdUtils.getElasticJobConfigurationPOJO(jobId)); + MigrationJobConfiguration jobConfig = new PipelineJobManager(migrationJobAPI).getJobConfiguration(jobId); verifyInventoryFinished(jobConfig); checkJobAPI.createJobAndStart(new CreateConsistencyCheckJobParameter(jobId, algorithmTypeName, algorithmProps, jobConfig.getSourceDatabaseType(), jobConfig.getTargetDatabaseType())); } private void verifyInventoryFinished(final MigrationJobConfiguration jobConfig) { - ShardingSpherePreconditions.checkState(PipelineJobProgressDetector.isInventoryFinished(jobConfig.getJobShardingCount(), migrationJobAPI.getJobProgress(jobConfig).values()), + InventoryIncrementalJobManager inventoryIncrementalJobManager = new InventoryIncrementalJobManager(migrationJobAPI); + ShardingSpherePreconditions.checkState(PipelineJobProgressDetector.isInventoryFinished(jobConfig.getJobShardingCount(), inventoryIncrementalJobManager.getJobProgress(jobConfig).values()), () -> new PipelineInvalidParameterException("Inventory is not finished.")); } 
diff --git a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/api/impl/CDCJobAPI.java b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/api/impl/CDCJobAPI.java index 4d6d31a3cfbee..c6cd66526876b 100644 --- a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/api/impl/CDCJobAPI.java +++ b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/api/impl/CDCJobAPI.java @@ -67,9 +67,11 @@ import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.mapper.TableAndSchemaNameMapper; import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobCenter; import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils; +import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobAPI; +import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobManager; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineAPIFactory; +import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobItemManager; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobManager; -import org.apache.shardingsphere.data.pipeline.core.job.service.impl.AbstractInventoryIncrementalJobAPIImpl; import org.apache.shardingsphere.data.pipeline.core.preparer.PipelineJobPreparerUtils; import org.apache.shardingsphere.elasticjob.infra.pojo.JobConfigurationPOJO; import org.apache.shardingsphere.elasticjob.lite.api.bootstrap.impl.OneOffJobBootstrap; @@ -83,6 +85,7 @@ import org.apache.shardingsphere.infra.yaml.config.swapper.rule.YamlRuleConfigurationSwapperEngine; import java.sql.SQLException; +import java.time.LocalDateTime; import java.util.Collection; import java.util.HashMap; import java.util.List; @@ -96,7 +99,7 @@ * CDC job API. 
*/ @Slf4j -public final class CDCJobAPI extends AbstractInventoryIncrementalJobAPIImpl { +public final class CDCJobAPI implements InventoryIncrementalJobAPI { private final YamlDataSourceConfigurationSwapper dataSourceConfigSwapper = new YamlDataSourceConfigurationSwapper(); @@ -123,7 +126,7 @@ public String createJob(final StreamDataParameter param, final CDCSinkType sinkT if (repositoryAPI.isExisted(jobConfigKey)) { log.warn("CDC job already exists in registry center, ignore, jobConfigKey={}", jobConfigKey); } else { - repositoryAPI.persist(PipelineMetaDataNode.getJobRootPath(jobConfig.getJobId()), getPipelineJobClass().getName()); + repositoryAPI.persist(PipelineMetaDataNode.getJobRootPath(jobConfig.getJobId()), getJobClass().getName()); JobConfigurationPOJO jobConfigPOJO = jobConfig.convertToJobConfigurationPOJO(); jobConfigPOJO.setDisabled(true); repositoryAPI.persist(jobConfigKey, YamlEngine.marshal(jobConfigPOJO)); @@ -168,15 +171,16 @@ private ShardingSpherePipelineDataSourceConfiguration getDataSourceConfiguration private void initIncrementalPosition(final CDCJobConfiguration jobConfig) { String jobId = jobConfig.getJobId(); + PipelineJobItemManager jobItemManager = new PipelineJobItemManager<>(getYamlJobItemProgressSwapper()); try (PipelineDataSourceManager pipelineDataSourceManager = new DefaultPipelineDataSourceManager()) { for (int i = 0; i < jobConfig.getJobShardingCount(); i++) { - if (getJobItemProgress(jobId, i).isPresent()) { + if (jobItemManager.getProgress(jobId, i).isPresent()) { continue; } IncrementalDumperContext dumperContext = buildDumperContext(jobConfig, i, new TableAndSchemaNameMapper(jobConfig.getSchemaTableNames())); InventoryIncrementalJobItemProgress jobItemProgress = getInventoryIncrementalJobItemProgress(jobConfig, pipelineDataSourceManager, dumperContext); PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobId)).persistJobItemProgress( - jobId, i, 
YamlEngine.marshal(getJobItemProgressSwapper().swapToYamlConfiguration(jobItemProgress))); + jobId, i, YamlEngine.marshal(getYamlJobItemProgressSwapper().swapToYamlConfiguration(jobItemProgress))); } } catch (final SQLException ex) { throw new PrepareJobWithGetBinlogPositionException(jobId, ex); @@ -220,6 +224,7 @@ public void updateJobConfigurationDisabled(final String jobId, final boolean dis JobConfigurationPOJO jobConfigPOJO = PipelineJobIdUtils.getElasticJobConfigurationPOJO(jobId); jobConfigPOJO.setDisabled(disabled); if (disabled) { + jobConfigPOJO.getProps().setProperty("stop_time", LocalDateTime.now().format(PipelineJobConfiguration.DATE_TIME_FORMATTER)); jobConfigPOJO.getProps().setProperty("stop_time_millis", String.valueOf(System.currentTimeMillis())); } else { jobConfigPOJO.getProps().setProperty("start_time_millis", String.valueOf(System.currentTimeMillis())); @@ -275,19 +280,19 @@ private ImporterConfiguration buildImporterConfiguration(final CDCJobConfigurati @Override public CDCProcessContext buildPipelineProcessContext(final PipelineJobConfiguration pipelineJobConfig) { - return new CDCProcessContext(pipelineJobConfig.getJobId(), showProcessConfiguration(PipelineJobIdUtils.parseContextKey(pipelineJobConfig.getJobId()))); + InventoryIncrementalJobManager jobManager = new InventoryIncrementalJobManager(this); + return new CDCProcessContext(pipelineJobConfig.getJobId(), jobManager.showProcessConfiguration(PipelineJobIdUtils.parseContextKey(pipelineJobConfig.getJobId()))); } @Override - public CDCJobConfiguration getJobConfiguration(final JobConfigurationPOJO jobConfigPOJO) { - return new YamlCDCJobConfigurationSwapper().swapToObject(jobConfigPOJO.getJobParameter()); + public YamlCDCJobConfigurationSwapper getYamlJobConfigurationSwapper() { + return new YamlCDCJobConfigurationSwapper(); } @Override public TableBasedPipelineJobInfo getJobInfo(final String jobId) { - JobConfigurationPOJO jobConfigPOJO = 
PipelineJobIdUtils.getElasticJobConfigurationPOJO(jobId); - PipelineJobMetaData jobMetaData = new PipelineJobMetaData(jobConfigPOJO); - CDCJobConfiguration jobConfig = getJobConfiguration(jobConfigPOJO); + PipelineJobMetaData jobMetaData = new PipelineJobMetaData(PipelineJobIdUtils.getElasticJobConfigurationPOJO(jobId)); + CDCJobConfiguration jobConfig = new PipelineJobManager(this).getJobConfiguration(jobId); return new TableBasedPipelineJobInfo(jobMetaData, jobConfig.getDatabaseName(), String.join(", ", jobConfig.getSchemaTableNames())); } @@ -301,9 +306,8 @@ public void commit(final String jobId) { * @param jobId job id */ public void dropStreaming(final String jobId) { - JobConfigurationPOJO jobConfigPOJO = PipelineJobIdUtils.getElasticJobConfigurationPOJO(jobId); - CDCJobConfiguration jobConfig = getJobConfiguration(jobConfigPOJO); - ShardingSpherePreconditions.checkState(jobConfigPOJO.isDisabled(), () -> new PipelineInternalException("Can't drop streaming job which is active")); + CDCJobConfiguration jobConfig = new PipelineJobManager(this).getJobConfiguration(jobId); + ShardingSpherePreconditions.checkState(PipelineJobIdUtils.getElasticJobConfigurationPOJO(jobId).isDisabled(), () -> new PipelineInternalException("Can't drop streaming job which is active")); new PipelineJobManager(this).drop(jobId); cleanup(jobConfig); } @@ -329,7 +333,7 @@ public PipelineDataConsistencyChecker buildPipelineDataConsistencyChecker(final } @Override - public Class getPipelineJobClass() { + public Class getJobClass() { return CDCJob.class; } diff --git a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/importer/sink/CDCSocketSink.java b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/importer/sink/CDCSocketSink.java index d45c14b4b8c58..9c84a6a9112ae 100644 --- 
a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/importer/sink/CDCSocketSink.java +++ b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/importer/sink/CDCSocketSink.java @@ -29,6 +29,7 @@ import org.apache.shardingsphere.data.pipeline.cdc.util.DataRecordResultConvertUtils; import org.apache.shardingsphere.data.pipeline.common.job.progress.listener.PipelineJobProgressUpdatedParameter; import org.apache.shardingsphere.data.pipeline.core.importer.sink.PipelineSink; +import org.apache.shardingsphere.infra.exception.core.external.sql.sqlstate.XOpenSQLState; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import java.io.IOException; @@ -117,5 +118,6 @@ private void doAwait() { @Override public void close() throws IOException { + channel.writeAndFlush(CDCResponseUtils.failed("", XOpenSQLState.GENERAL_ERROR.getValue(), "The socket channel is closed.")); } } diff --git a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/job/CDCJob.java b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/job/CDCJob.java index c2637c0de36e6..0f6b66fa7c1d0 100644 --- a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/job/CDCJob.java +++ b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/job/CDCJob.java @@ -41,7 +41,7 @@ import org.apache.shardingsphere.data.pipeline.core.importer.sink.PipelineSink; import org.apache.shardingsphere.data.pipeline.core.job.AbstractPipelineJob; import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobCenter; -import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobManager; +import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobItemManager; import 
org.apache.shardingsphere.data.pipeline.core.task.PipelineTask; import org.apache.shardingsphere.data.pipeline.core.task.runner.PipelineTasksRunner; import org.apache.shardingsphere.elasticjob.api.ShardingContext; @@ -65,7 +65,7 @@ public final class CDCJob extends AbstractPipelineJob implements SimpleJob { private final CDCJobAPI jobAPI = new CDCJobAPI(); - private final PipelineJobManager jobManager = new PipelineJobManager(jobAPI); + private final PipelineJobItemManager jobItemManager = new PipelineJobItemManager<>(jobAPI.getYamlJobItemProgressSwapper()); private final CDCJobPreparer jobPreparer = new CDCJobPreparer(); @@ -93,7 +93,7 @@ public void execute(final ShardingContext shardingContext) { continue; } jobItemContexts.add(jobItemContext); - jobManager.cleanJobItemErrorMessage(jobId, shardingItem); + jobItemManager.cleanErrorMessage(jobId, shardingItem); log.info("start tasks runner, jobId={}, shardingItem={}", jobId, shardingItem); } if (jobItemContexts.isEmpty()) { @@ -106,7 +106,7 @@ public void execute(final ShardingContext shardingContext) { } private CDCJobItemContext buildPipelineJobItemContext(final CDCJobConfiguration jobConfig, final int shardingItem) { - Optional initProgress = jobAPI.getJobItemProgress(jobConfig.getJobId(), shardingItem); + Optional initProgress = jobItemManager.getProgress(jobConfig.getJobId(), shardingItem); CDCProcessContext jobProcessContext = jobAPI.buildPipelineProcessContext(jobConfig); CDCTaskConfiguration taskConfig = jobAPI.buildTaskConfiguration(jobConfig, shardingItem, jobProcessContext.getPipelineProcessConfig()); return new CDCJobItemContext(jobConfig, shardingItem, initProgress.orElse(null), jobProcessContext, taskConfig, dataSourceManager, sink); @@ -127,7 +127,7 @@ private void prepare(final Collection jobItemContexts) { private void processFailed(final String jobId, final int shardingItem, final Exception ex) { log.error("job execution failed, {}-{}", jobId, shardingItem, ex); - 
jobManager.updateJobItemErrorMessage(jobId, shardingItem, ex); + jobItemManager.updateErrorMessage(jobId, shardingItem, ex); PipelineJobCenter.stop(jobId); jobAPI.updateJobConfigurationDisabled(jobId, true); } @@ -151,7 +151,7 @@ private void executeInventoryTasks(final List jobItemContexts private void updateLocalAndRemoteJobItemStatus(final PipelineJobItemContext jobItemContext, final JobStatus jobStatus) { jobItemContext.setStatus(jobStatus); - jobAPI.updateJobItemStatus(jobItemContext.getJobId(), jobItemContext.getShardingItem(), jobStatus); + jobItemManager.updateStatus(jobItemContext.getJobId(), jobItemContext.getShardingItem(), jobStatus); } private void executeIncrementalTasks(final List jobItemContexts) { @@ -204,7 +204,7 @@ public void onSuccess() { public void onFailure(final Throwable throwable) { log.error("onFailure, {} task execute failed.", identifier, throwable); String jobId = jobItemContext.getJobId(); - jobManager.updateJobItemErrorMessage(jobId, jobItemContext.getShardingItem(), throwable); + jobItemManager.updateErrorMessage(jobId, jobItemContext.getShardingItem(), throwable); if (jobItemContext.getSink() instanceof CDCSocketSink) { CDCSocketSink cdcSink = (CDCSocketSink) jobItemContext.getSink(); cdcSink.getChannel().writeAndFlush(CDCResponseUtils.failed("", "", throwable.getMessage())); diff --git a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/prepare/CDCJobPreparer.java b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/prepare/CDCJobPreparer.java index 681307a84c1a0..b39a064745818 100644 --- a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/prepare/CDCJobPreparer.java +++ b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/prepare/CDCJobPreparer.java @@ -18,11 +18,6 @@ package 
org.apache.shardingsphere.data.pipeline.cdc.core.prepare; import lombok.extern.slf4j.Slf4j; -import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.IncrementalDumperContext; -import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.InventoryDumperContext; -import org.apache.shardingsphere.data.pipeline.core.ingest.channel.PipelineChannel; -import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.Dumper; -import org.apache.shardingsphere.data.pipeline.core.ingest.position.IngestPosition; import org.apache.shardingsphere.data.pipeline.cdc.api.impl.CDCJobAPI; import org.apache.shardingsphere.data.pipeline.cdc.config.task.CDCTaskConfiguration; import org.apache.shardingsphere.data.pipeline.cdc.context.CDCJobItemContext; @@ -35,17 +30,23 @@ import org.apache.shardingsphere.data.pipeline.common.ingest.position.FinishedPosition; import org.apache.shardingsphere.data.pipeline.common.job.progress.InventoryIncrementalJobItemProgress; import org.apache.shardingsphere.data.pipeline.common.job.progress.JobItemIncrementalTasksProgress; +import org.apache.shardingsphere.data.pipeline.common.spi.ingest.dumper.IncrementalDumperCreator; import org.apache.shardingsphere.data.pipeline.common.task.progress.IncrementalTaskProgress; -import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.InventoryDumper; import org.apache.shardingsphere.data.pipeline.core.exception.job.PrepareJobWithGetBinlogPositionException; import org.apache.shardingsphere.data.pipeline.core.importer.Importer; import org.apache.shardingsphere.data.pipeline.core.importer.ImporterType; +import org.apache.shardingsphere.data.pipeline.core.ingest.channel.PipelineChannel; +import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.Dumper; +import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.InventoryDumper; +import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.IncrementalDumperContext; +import 
org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.InventoryDumperContext; +import org.apache.shardingsphere.data.pipeline.core.ingest.position.IngestPosition; import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobCenter; +import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobItemManager; import org.apache.shardingsphere.data.pipeline.core.preparer.InventoryTaskSplitter; import org.apache.shardingsphere.data.pipeline.core.preparer.PipelineJobPreparerUtils; import org.apache.shardingsphere.data.pipeline.core.task.PipelineTask; import org.apache.shardingsphere.data.pipeline.core.task.PipelineTaskUtils; -import org.apache.shardingsphere.data.pipeline.common.spi.ingest.dumper.IncrementalDumperCreator; import org.apache.shardingsphere.infra.database.core.spi.DatabaseTypedSPILoader; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.database.opengauss.type.OpenGaussDatabaseType; @@ -68,6 +69,8 @@ public final class CDCJobPreparer { private final CDCJobAPI jobAPI = new CDCJobAPI(); + private final PipelineJobItemManager jobItemManager = new PipelineJobItemManager<>(jobAPI.getYamlJobItemProgressSwapper()); + /** * Do prepare work. 
* @@ -86,9 +89,9 @@ public void initTasks(final Collection jobItemContexts) { private void initTasks0(final CDCJobItemContext jobItemContext, final AtomicBoolean inventoryImporterUsed, final List inventoryChannelProgressPairs, final AtomicBoolean incrementalImporterUsed, final List incrementalChannelProgressPairs) { - Optional jobItemProgress = jobAPI.getJobItemProgress(jobItemContext.getJobId(), jobItemContext.getShardingItem()); + Optional jobItemProgress = jobItemManager.getProgress(jobItemContext.getJobId(), jobItemContext.getShardingItem()); if (!jobItemProgress.isPresent()) { - jobAPI.persistJobItemProgress(jobItemContext); + jobItemManager.persistProgress(jobItemContext); } if (jobItemContext.isStopping()) { PipelineJobCenter.stop(jobItemContext.getJobId()); diff --git a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/handler/CDCBackendHandler.java b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/handler/CDCBackendHandler.java index 2ef78db661ef0..05070a7a901ef 100644 --- a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/handler/CDCBackendHandler.java +++ b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/handler/CDCBackendHandler.java @@ -47,7 +47,7 @@ import org.apache.shardingsphere.data.pipeline.core.exception.job.PipelineJobNotFoundException; import org.apache.shardingsphere.data.pipeline.core.exception.param.PipelineInvalidParameterException; import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobCenter; -import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils; +import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobManager; import org.apache.shardingsphere.infra.database.core.metadata.database.DialectDatabaseMetaData; import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; import 
org.apache.shardingsphere.infra.database.opengauss.type.OpenGaussDatabaseType; @@ -72,6 +72,8 @@ public final class CDCBackendHandler { private final CDCJobAPI jobAPI = new CDCJobAPI(); + private final PipelineJobManager jobManager = new PipelineJobManager(jobAPI); + /** * Get database name by job ID. * @@ -79,7 +81,7 @@ public final class CDCBackendHandler { * @return database */ public String getDatabaseNameByJobId(final String jobId) { - return jobAPI.getJobConfiguration(PipelineJobIdUtils.getElasticJobConfigurationPOJO(jobId)).getDatabaseName(); + return jobManager.getJobConfiguration(jobId).getDatabaseName(); } /** @@ -127,7 +129,7 @@ public CDCResponse streamData(final String requestId, final StreamDataRequestBod * @param connectionContext connection context */ public void startStreaming(final String jobId, final CDCConnectionContext connectionContext, final Channel channel) { - CDCJobConfiguration cdcJobConfig = jobAPI.getJobConfiguration(PipelineJobIdUtils.getElasticJobConfigurationPOJO(jobId)); + CDCJobConfiguration cdcJobConfig = jobManager.getJobConfiguration(jobId); ShardingSpherePreconditions.checkNotNull(cdcJobConfig, () -> new PipelineJobNotFoundException(jobId)); if (PipelineJobCenter.isJobExisting(jobId)) { PipelineJobCenter.stop(jobId); diff --git a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/yaml/swapper/YamlCDCJobConfigurationSwapper.java b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/yaml/swapper/YamlCDCJobConfigurationSwapper.java index 871ed0eb7eb1d..34ba337f69d4a 100644 --- a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/yaml/swapper/YamlCDCJobConfigurationSwapper.java +++ b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/yaml/swapper/YamlCDCJobConfigurationSwapper.java @@ -18,17 +18,17 @@ package 
org.apache.shardingsphere.data.pipeline.cdc.yaml.swapper; import org.apache.shardingsphere.data.pipeline.api.type.ShardingSpherePipelineDataSourceConfiguration; -import org.apache.shardingsphere.data.pipeline.common.datasource.yaml.YamlPipelineDataSourceConfigurationSwapper; import org.apache.shardingsphere.data.pipeline.cdc.config.job.CDCJobConfiguration; import org.apache.shardingsphere.data.pipeline.cdc.config.job.CDCJobConfiguration.SinkConfiguration; import org.apache.shardingsphere.data.pipeline.cdc.constant.CDCSinkType; import org.apache.shardingsphere.data.pipeline.cdc.yaml.config.YamlCDCJobConfiguration; import org.apache.shardingsphere.data.pipeline.cdc.yaml.config.YamlCDCJobConfiguration.YamlSinkConfiguration; import org.apache.shardingsphere.data.pipeline.common.datanode.JobDataNodeLine; +import org.apache.shardingsphere.data.pipeline.common.datasource.yaml.YamlPipelineDataSourceConfigurationSwapper; +import org.apache.shardingsphere.data.pipeline.core.job.yaml.YamlPipelineJobConfigurationSwapper; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; -import org.apache.shardingsphere.infra.util.yaml.swapper.YamlConfigurationSwapper; import java.util.Collections; import java.util.List; @@ -37,7 +37,7 @@ /** * YAML CDC job configuration swapper. 
*/ -public final class YamlCDCJobConfigurationSwapper implements YamlConfigurationSwapper { +public final class YamlCDCJobConfigurationSwapper implements YamlPipelineJobConfigurationSwapper { private final YamlPipelineDataSourceConfigurationSwapper dataSourceConfigSwapper = new YamlPipelineDataSourceConfigurationSwapper(); @@ -81,12 +81,7 @@ public CDCJobConfiguration swapToObject(final YamlCDCJobConfiguration yamlConfig jobShardingDataNodes, yamlConfig.isDecodeWithTX(), sinkConfig, yamlConfig.getConcurrency(), yamlConfig.getRetryTimes()); } - /** - * Swap to job configuration from text. - * - * @param jobParam job parameter - * @return job configuration - */ + @Override public CDCJobConfiguration swapToObject(final String jobParam) { return null == jobParam ? null : swapToObject(YamlEngine.unmarshal(jobParam, YamlCDCJobConfiguration.class, true)); } diff --git a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/ConsistencyCheckJob.java b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/ConsistencyCheckJob.java index 7d289dd0a8962..8932efa661360 100644 --- a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/ConsistencyCheckJob.java +++ b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/ConsistencyCheckJob.java @@ -22,8 +22,8 @@ import org.apache.shardingsphere.data.pipeline.common.job.JobStatus; import org.apache.shardingsphere.data.pipeline.common.job.progress.ConsistencyCheckJobItemProgress; import org.apache.shardingsphere.data.pipeline.core.job.AbstractSimplePipelineJob; +import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobItemManager; import org.apache.shardingsphere.data.pipeline.core.task.runner.PipelineTasksRunner; -import 
org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.api.impl.ConsistencyCheckJobAPI; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.config.ConsistencyCheckJobConfiguration; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.config.yaml.YamlConsistencyCheckJobConfigurationSwapper; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.context.ConsistencyCheckJobItemContext; @@ -45,8 +45,8 @@ public ConsistencyCheckJob(final String jobId) { @Override public ConsistencyCheckJobItemContext buildPipelineJobItemContext(final ShardingContext shardingContext) { ConsistencyCheckJobConfiguration jobConfig = new YamlConsistencyCheckJobConfigurationSwapper().swapToObject(shardingContext.getJobParameter()); - ConsistencyCheckJobAPI jobAPI = (ConsistencyCheckJobAPI) getJobAPI(); - Optional jobItemProgress = jobAPI.getJobItemProgress(jobConfig.getJobId(), shardingContext.getShardingItem()); + PipelineJobItemManager jobItemManager = new PipelineJobItemManager<>(getJobAPI().getYamlJobItemProgressSwapper()); + Optional jobItemProgress = jobItemManager.getProgress(jobConfig.getJobId(), shardingContext.getShardingItem()); return new ConsistencyCheckJobItemContext(jobConfig, shardingContext.getShardingItem(), JobStatus.RUNNING, jobItemProgress.orElse(null)); } diff --git a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/api/impl/ConsistencyCheckJobAPI.java b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/api/impl/ConsistencyCheckJobAPI.java index 88e39ee70d309..63e6ff424362a 100644 --- a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/api/impl/ConsistencyCheckJobAPI.java +++ 
b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/api/impl/ConsistencyCheckJobAPI.java @@ -20,14 +20,11 @@ import com.google.common.base.Strings; import lombok.extern.slf4j.Slf4j; import org.apache.shardingsphere.data.pipeline.common.context.PipelineContextKey; -import org.apache.shardingsphere.data.pipeline.common.context.PipelineJobItemContext; import org.apache.shardingsphere.data.pipeline.common.job.JobStatus; import org.apache.shardingsphere.data.pipeline.common.job.progress.ConsistencyCheckJobItemProgress; -import org.apache.shardingsphere.data.pipeline.common.job.progress.yaml.YamlConsistencyCheckJobItemProgress; import org.apache.shardingsphere.data.pipeline.common.job.progress.yaml.YamlConsistencyCheckJobItemProgressSwapper; import org.apache.shardingsphere.data.pipeline.common.pojo.ConsistencyCheckJobItemInfo; import org.apache.shardingsphere.data.pipeline.common.registrycenter.repository.GovernanceRepositoryAPI; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.ConsistencyCheckJobItemProgressContext; import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableDataConsistencyCheckerFactory; import org.apache.shardingsphere.data.pipeline.core.exception.data.UnsupportedPipelineDatabaseTypeException; @@ -35,8 +32,10 @@ import org.apache.shardingsphere.data.pipeline.core.exception.job.UncompletedConsistencyCheckJobExistsException; import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils; import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobAPI; +import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobManager; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineAPIFactory; import 
org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobAPI; +import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobItemManager; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobManager; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.ConsistencyCheckJob; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.ConsistencyCheckJobId; @@ -44,13 +43,11 @@ import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.config.ConsistencyCheckJobConfiguration; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.config.yaml.YamlConsistencyCheckJobConfiguration; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.config.yaml.YamlConsistencyCheckJobConfigurationSwapper; -import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.context.ConsistencyCheckJobItemContext; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.util.ConsistencyCheckSequence; import org.apache.shardingsphere.elasticjob.infra.pojo.JobConfigurationPOJO; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; -import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import java.sql.Timestamp; import java.time.Duration; @@ -75,8 +72,6 @@ public final class ConsistencyCheckJobAPI implements PipelineJobAPI { private static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS"); - private final YamlConsistencyCheckJobItemProgressSwapper swapper = new YamlConsistencyCheckJobItemProgressSwapper(); - /** * Create consistency check configuration and start job. 
* @@ -89,7 +84,8 @@ public String createJobAndStart(final CreateConsistencyCheckJobParameter param) GovernanceRepositoryAPI repositoryAPI = PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(parentJobId)); Optional latestCheckJobId = repositoryAPI.getLatestCheckJobId(parentJobId); if (latestCheckJobId.isPresent()) { - Optional progress = getJobItemProgress(latestCheckJobId.get(), 0); + PipelineJobItemManager jobItemManager = new PipelineJobItemManager<>(getYamlJobItemProgressSwapper()); + Optional progress = jobItemManager.getProgress(latestCheckJobId.get(), 0); if (!progress.isPresent() || JobStatus.FINISHED != progress.get().getStatus()) { log.info("check job already exists and status is not FINISHED, progress={}", progress); throw new UncompletedConsistencyCheckJobExistsException(latestCheckJobId.get()); @@ -122,48 +118,6 @@ public boolean isIgnoreToStartDisabledJobWhenJobItemProgressIsFinished() { return true; } - @Override - public void persistJobItemProgress(final PipelineJobItemContext jobItemContext) { - PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobItemContext.getJobId())) - .persistJobItemProgress(jobItemContext.getJobId(), jobItemContext.getShardingItem(), convertJobItemProgress(jobItemContext)); - } - - private String convertJobItemProgress(final PipelineJobItemContext jobItemContext) { - ConsistencyCheckJobItemContext context = (ConsistencyCheckJobItemContext) jobItemContext; - ConsistencyCheckJobItemProgressContext progressContext = context.getProgressContext(); - String tableNames = String.join(",", progressContext.getTableNames()); - String ignoredTableNames = String.join(",", progressContext.getIgnoredTableNames()); - ConsistencyCheckJobItemProgress jobItemProgress = new ConsistencyCheckJobItemProgress(tableNames, ignoredTableNames, progressContext.getCheckedRecordsCount().get(), - progressContext.getRecordsCount(), progressContext.getCheckBeginTimeMillis(), 
progressContext.getCheckEndTimeMillis(), - progressContext.getSourceTableCheckPositions(), progressContext.getTargetTableCheckPositions(), progressContext.getSourceDatabaseType()); - jobItemProgress.setStatus(context.getStatus()); - return YamlEngine.marshal(swapper.swapToYamlConfiguration(jobItemProgress)); - } - - @Override - public void updateJobItemProgress(final PipelineJobItemContext jobItemContext) { - PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobItemContext.getJobId())) - .updateJobItemProgress(jobItemContext.getJobId(), jobItemContext.getShardingItem(), convertJobItemProgress(jobItemContext)); - } - - @Override - public Optional getJobItemProgress(final String jobId, final int shardingItem) { - Optional progress = PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobId)).getJobItemProgress(jobId, shardingItem); - return progress.map(s -> swapper.swapToObject(YamlEngine.unmarshal(s, YamlConsistencyCheckJobItemProgress.class, true))); - } - - @Override - public void updateJobItemStatus(final String jobId, final int shardingItem, final JobStatus status) { - Optional jobItemProgress = getJobItemProgress(jobId, shardingItem); - if (!jobItemProgress.isPresent()) { - log.warn("updateJobItemStatus, jobProgress is null, jobId={}, shardingItem={}", jobId, shardingItem); - return; - } - jobItemProgress.get().setStatus(status); - PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(jobId)).updateJobItemProgress(jobId, shardingItem, - YamlEngine.marshal(swapper.swapToYamlConfiguration(jobItemProgress.get()))); - } - /** * Start by parent job id. 
* @@ -222,7 +176,8 @@ public List getJobItemInfos(final String parentJobI Optional latestCheckJobId = governanceRepositoryAPI.getLatestCheckJobId(parentJobId); ShardingSpherePreconditions.checkState(latestCheckJobId.isPresent(), () -> new ConsistencyCheckJobNotFoundException(parentJobId)); String checkJobId = latestCheckJobId.get(); - Optional progress = getJobItemProgress(checkJobId, 0); + PipelineJobItemManager jobItemManager = new PipelineJobItemManager<>(getYamlJobItemProgressSwapper()); + Optional progress = jobItemManager.getProgress(checkJobId, 0); if (!progress.isPresent()) { return Collections.emptyList(); } @@ -262,7 +217,8 @@ private ConsistencyCheckJobItemInfo getJobItemInfo(final String parentJobId) { Optional latestCheckJobId = governanceRepositoryAPI.getLatestCheckJobId(parentJobId); ShardingSpherePreconditions.checkState(latestCheckJobId.isPresent(), () -> new ConsistencyCheckJobNotFoundException(parentJobId)); String checkJobId = latestCheckJobId.get(); - Optional progress = getJobItemProgress(checkJobId, 0); + PipelineJobItemManager jobItemManager = new PipelineJobItemManager<>(getYamlJobItemProgressSwapper()); + Optional progress = jobItemManager.getProgress(checkJobId, 0); ConsistencyCheckJobItemInfo result = new ConsistencyCheckJobItemInfo(); JobConfigurationPOJO jobConfigPOJO = PipelineJobIdUtils.getElasticJobConfigurationPOJO(checkJobId); result.setActive(!jobConfigPOJO.isDisabled()); @@ -278,7 +234,7 @@ private ConsistencyCheckJobItemInfo getJobItemInfo(final String parentJobId) { fillInJobItemInfoWithTimes(result, jobItemProgress, jobConfigPOJO); result.setTableNames(Optional.ofNullable(jobItemProgress.getTableNames()).orElse("")); fillInJobItemInfoWithCheckAlgorithm(result, checkJobId); - result.setErrorMessage(new PipelineJobManager(this).getJobItemErrorMessage(checkJobId, 0)); + result.setErrorMessage(new PipelineJobItemManager<>(getYamlJobItemProgressSwapper()).getErrorMessage(checkJobId, 0)); Map checkJobResult = 
governanceRepositoryAPI.getCheckJobResult(parentJobId, checkJobId); fillInJobItemInfoWithCheckResult(result, checkJobResult, parentJobId); result.setCheckFailedTableNames(checkJobResult.entrySet().stream().filter(each -> !each.getValue().isIgnored() && !each.getValue().isMatched()) @@ -312,7 +268,7 @@ private void fillInJobItemInfoWithTimes(final ConsistencyCheckJobItemInfo result } private void fillInJobItemInfoWithCheckAlgorithm(final ConsistencyCheckJobItemInfo result, final String checkJobId) { - ConsistencyCheckJobConfiguration jobConfig = getJobConfiguration(PipelineJobIdUtils.getElasticJobConfigurationPOJO(checkJobId)); + ConsistencyCheckJobConfiguration jobConfig = new PipelineJobManager(this).getJobConfiguration(checkJobId); result.setAlgorithmType(jobConfig.getAlgorithmTypeName()); if (null != jobConfig.getAlgorithmProps()) { result.setAlgorithmProps(jobConfig.getAlgorithmProps().entrySet().stream().map(entry -> String.format("'%s'='%s'", entry.getKey(), entry.getValue())).collect(Collectors.joining(","))); @@ -323,19 +279,25 @@ private void fillInJobItemInfoWithCheckResult(final ConsistencyCheckJobItemInfo if (checkJobResult.isEmpty()) { result.setCheckSuccess(null); } else { - InventoryIncrementalJobAPI inventoryIncrementalJobAPI = (InventoryIncrementalJobAPI) TypedSPILoader.getService( - PipelineJobAPI.class, PipelineJobIdUtils.parseJobType(parentJobId).getType()); - result.setCheckSuccess(inventoryIncrementalJobAPI.aggregateDataConsistencyCheckResults(parentJobId, checkJobResult)); + InventoryIncrementalJobManager inventoryIncrementalJobManager = new InventoryIncrementalJobManager( + (InventoryIncrementalJobAPI) TypedSPILoader.getService(PipelineJobAPI.class, PipelineJobIdUtils.parseJobType(parentJobId).getType())); + result.setCheckSuccess(inventoryIncrementalJobManager.aggregateDataConsistencyCheckResults(parentJobId, checkJobResult)); } } @Override - public ConsistencyCheckJobConfiguration getJobConfiguration(final JobConfigurationPOJO 
jobConfigPOJO) { - return new YamlConsistencyCheckJobConfigurationSwapper().swapToObject(jobConfigPOJO.getJobParameter()); + public YamlConsistencyCheckJobConfigurationSwapper getYamlJobConfigurationSwapper() { + return new YamlConsistencyCheckJobConfigurationSwapper(); + } + + @SuppressWarnings("unchecked") + @Override + public YamlConsistencyCheckJobItemProgressSwapper getYamlJobItemProgressSwapper() { + return new YamlConsistencyCheckJobItemProgressSwapper(); } @Override - public Class getPipelineJobClass() { + public Class getJobClass() { return ConsistencyCheckJob.class; } diff --git a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/config/yaml/YamlConsistencyCheckJobConfigurationSwapper.java b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/config/yaml/YamlConsistencyCheckJobConfigurationSwapper.java index 5431c935f213a..f18ce3f59036f 100644 --- a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/config/yaml/YamlConsistencyCheckJobConfigurationSwapper.java +++ b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/config/yaml/YamlConsistencyCheckJobConfigurationSwapper.java @@ -17,16 +17,16 @@ package org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.config.yaml; +import org.apache.shardingsphere.data.pipeline.core.job.yaml.YamlPipelineJobConfigurationSwapper; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.config.ConsistencyCheckJobConfiguration; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; -import 
org.apache.shardingsphere.infra.util.yaml.swapper.YamlConfigurationSwapper; /** * YAML consistency check job configuration swapper. */ -public final class YamlConsistencyCheckJobConfigurationSwapper implements YamlConfigurationSwapper { +public final class YamlConsistencyCheckJobConfigurationSwapper implements YamlPipelineJobConfigurationSwapper { @Override public YamlConsistencyCheckJobConfiguration swapToYamlConfiguration(final ConsistencyCheckJobConfiguration data) { @@ -45,12 +45,7 @@ public ConsistencyCheckJobConfiguration swapToObject(final YamlConsistencyCheckJ return new ConsistencyCheckJobConfiguration(yamlConfig.getJobId(), yamlConfig.getParentJobId(), yamlConfig.getAlgorithmTypeName(), yamlConfig.getAlgorithmProps(), databaseType); } - /** - * Swap to job configuration from text. - * - * @param jobParam job parameter - * @return job configuration - */ + @Override public ConsistencyCheckJobConfiguration swapToObject(final String jobParam) { return null == jobParam ? null : swapToObject(YamlEngine.unmarshal(jobParam, YamlConsistencyCheckJobConfiguration.class, true)); } diff --git a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/context/ConsistencyCheckJobItemContext.java b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/context/ConsistencyCheckJobItemContext.java index 5dd7865398b8a..30bed68443f42 100644 --- a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/context/ConsistencyCheckJobItemContext.java +++ b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/context/ConsistencyCheckJobItemContext.java @@ -69,4 +69,11 @@ public ConsistencyCheckJobItemContext(final ConsistencyCheckJobConfiguration job public PipelineProcessContext 
getJobProcessContext() { return processContext; } + + @Override + public ConsistencyCheckJobItemProgress toProgress() { + ConsistencyCheckJobItemProgress result = new ConsistencyCheckJobItemProgress(progressContext); + result.setStatus(status); + return result; + } } diff --git a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/task/ConsistencyCheckTasksRunner.java b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/task/ConsistencyCheckTasksRunner.java index 9c8b1f0f9491c..136e92b3dc5bf 100644 --- a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/task/ConsistencyCheckTasksRunner.java +++ b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/task/ConsistencyCheckTasksRunner.java @@ -19,12 +19,13 @@ import lombok.Getter; import lombok.extern.slf4j.Slf4j; -import org.apache.shardingsphere.data.pipeline.common.execute.AbstractPipelineLifecycleRunnable; -import org.apache.shardingsphere.data.pipeline.common.execute.PipelineLifecycleRunnable; import org.apache.shardingsphere.data.pipeline.common.config.job.PipelineJobConfiguration; +import org.apache.shardingsphere.data.pipeline.common.execute.AbstractPipelineLifecycleRunnable; import org.apache.shardingsphere.data.pipeline.common.execute.ExecuteCallback; import org.apache.shardingsphere.data.pipeline.common.execute.ExecuteEngine; +import org.apache.shardingsphere.data.pipeline.common.execute.PipelineLifecycleRunnable; import org.apache.shardingsphere.data.pipeline.common.job.JobStatus; +import org.apache.shardingsphere.data.pipeline.common.job.progress.InventoryIncrementalJobItemProgress; import org.apache.shardingsphere.data.pipeline.common.job.type.JobType; import 
org.apache.shardingsphere.data.pipeline.core.consistencycheck.PipelineDataConsistencyChecker; import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; @@ -32,6 +33,7 @@ import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobAPI; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineAPIFactory; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobAPI; +import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobItemManager; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobManager; import org.apache.shardingsphere.data.pipeline.core.task.runner.PipelineTasksRunner; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.api.impl.ConsistencyCheckJobAPI; @@ -50,9 +52,11 @@ @Slf4j public final class ConsistencyCheckTasksRunner implements PipelineTasksRunner { - private final ConsistencyCheckJobAPI checkJobAPI = new ConsistencyCheckJobAPI(); + private final ConsistencyCheckJobAPI jobAPI = new ConsistencyCheckJobAPI(); + + private final PipelineJobManager jobManager = new PipelineJobManager(jobAPI); - private final PipelineJobManager jobManager = new PipelineJobManager(checkJobAPI); + private final PipelineJobItemManager jobItemManager = new PipelineJobItemManager<>(jobAPI.getYamlJobItemProgressSwapper()); @Getter private final ConsistencyCheckJobItemContext jobItemContext; @@ -80,7 +84,8 @@ public void start() { if (jobItemContext.isStopping()) { return; } - TypedSPILoader.getService(PipelineJobAPI.class, PipelineJobIdUtils.parseJobType(jobItemContext.getJobId()).getType()).persistJobItemProgress(jobItemContext); + new PipelineJobItemManager<>(TypedSPILoader.getService(PipelineJobAPI.class, PipelineJobIdUtils.parseJobType(jobItemContext.getJobId()).getType()) + .getYamlJobItemProgressSwapper()).persistProgress(jobItemContext); CompletableFuture future = 
jobItemContext.getProcessContext().getConsistencyCheckExecuteEngine().submit(checkExecutor); ExecuteEngine.trigger(Collections.singletonList(future), new CheckExecuteCallback()); } @@ -95,10 +100,10 @@ private final class CheckPipelineLifecycleRunnable extends AbstractPipelineLifec @Override protected void runBlocking() { - checkJobAPI.persistJobItemProgress(jobItemContext); + jobItemManager.persistProgress(jobItemContext); JobType jobType = PipelineJobIdUtils.parseJobType(parentJobId); InventoryIncrementalJobAPI jobAPI = (InventoryIncrementalJobAPI) TypedSPILoader.getService(PipelineJobAPI.class, jobType.getType()); - PipelineJobConfiguration parentJobConfig = jobAPI.getJobConfiguration(PipelineJobIdUtils.getElasticJobConfigurationPOJO(parentJobId)); + PipelineJobConfiguration parentJobConfig = new PipelineJobManager(jobAPI).getJobConfiguration(parentJobId); try { PipelineDataConsistencyChecker checker = jobAPI.buildPipelineDataConsistencyChecker( parentJobConfig, jobAPI.buildPipelineProcessContext(parentJobConfig), jobItemContext.getProgressContext()); @@ -133,7 +138,7 @@ public void onSuccess() { } log.info("onSuccess, check job id: {}, parent job id: {}", checkJobId, parentJobId); jobItemContext.setStatus(JobStatus.FINISHED); - checkJobAPI.persistJobItemProgress(jobItemContext); + jobItemManager.persistProgress(jobItemContext); jobManager.stop(checkJobId); } @@ -146,7 +151,7 @@ public void onFailure(final Throwable throwable) { return; } log.info("onFailure, check job id: {}, parent job id: {}", checkJobId, parentJobId, throwable); - jobManager.updateJobItemErrorMessage(checkJobId, 0, throwable); + jobItemManager.updateErrorMessage(checkJobId, 0, throwable); jobManager.stop(checkJobId); } } diff --git a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/MigrationJob.java 
b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/MigrationJob.java index adef596081505..439cc36ad8168 100644 --- a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/MigrationJob.java +++ b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/MigrationJob.java @@ -24,6 +24,7 @@ import org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceManager; import org.apache.shardingsphere.data.pipeline.common.job.progress.InventoryIncrementalJobItemProgress; import org.apache.shardingsphere.data.pipeline.core.job.AbstractSimplePipelineJob; +import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobItemManager; import org.apache.shardingsphere.data.pipeline.core.task.runner.InventoryIncrementalTasksRunner; import org.apache.shardingsphere.data.pipeline.core.task.runner.PipelineTasksRunner; import org.apache.shardingsphere.data.pipeline.scenario.migration.api.impl.MigrationJobAPI; @@ -46,6 +47,8 @@ public final class MigrationJob extends AbstractSimplePipelineJob { private final MigrationJobAPI jobAPI = new MigrationJobAPI(); + private final PipelineJobItemManager jobItemManager = new PipelineJobItemManager<>(jobAPI.getYamlJobItemProgressSwapper()); + private final PipelineDataSourceManager dataSourceManager = new DefaultPipelineDataSourceManager(); // Shared by all sharding items @@ -59,7 +62,7 @@ public MigrationJob(final String jobId) { protected InventoryIncrementalJobItemContext buildPipelineJobItemContext(final ShardingContext shardingContext) { int shardingItem = shardingContext.getShardingItem(); MigrationJobConfiguration jobConfig = new YamlMigrationJobConfigurationSwapper().swapToObject(shardingContext.getJobParameter()); - Optional initProgress = jobAPI.getJobItemProgress(shardingContext.getJobName(), shardingItem); + Optional initProgress = 
jobItemManager.getProgress(shardingContext.getJobName(), shardingItem); MigrationProcessContext jobProcessContext = jobAPI.buildPipelineProcessContext(jobConfig); MigrationTaskConfiguration taskConfig = jobAPI.buildTaskConfiguration(jobConfig, shardingItem, jobProcessContext.getPipelineProcessConfig()); return new MigrationJobItemContext(jobConfig, shardingItem, initProgress.orElse(null), jobProcessContext, taskConfig, dataSourceManager); diff --git a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/impl/MigrationJobAPI.java b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/impl/MigrationJobAPI.java index 52527c619011f..6ffa6371715b4 100644 --- a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/impl/MigrationJobAPI.java +++ b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/impl/MigrationJobAPI.java @@ -53,9 +53,10 @@ import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.IncrementalDumperContext; import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.mapper.TableAndSchemaNameMapper; import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils; +import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobAPI; +import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobManager; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineAPIFactory; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobManager; -import org.apache.shardingsphere.data.pipeline.core.job.service.impl.AbstractInventoryIncrementalJobAPIImpl; import org.apache.shardingsphere.data.pipeline.core.metadata.PipelineDataSourcePersistService; import 
org.apache.shardingsphere.data.pipeline.scenario.migration.MigrationJob; import org.apache.shardingsphere.data.pipeline.scenario.migration.MigrationJobId; @@ -66,7 +67,6 @@ import org.apache.shardingsphere.data.pipeline.scenario.migration.context.MigrationProcessContext; import org.apache.shardingsphere.data.pipeline.yaml.job.YamlMigrationJobConfiguration; import org.apache.shardingsphere.data.pipeline.yaml.job.YamlMigrationJobConfigurationSwapper; -import org.apache.shardingsphere.elasticjob.infra.pojo.JobConfigurationPOJO; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; import org.apache.shardingsphere.infra.database.core.connector.ConnectionProperties; import org.apache.shardingsphere.infra.database.core.connector.ConnectionPropertiesParser; @@ -109,7 +109,7 @@ * Migration job API. */ @Slf4j -public final class MigrationJobAPI extends AbstractInventoryIncrementalJobAPIImpl { +public final class MigrationJobAPI implements InventoryIncrementalJobAPI { private final PipelineDataSourcePersistService dataSourcePersistService = new PipelineDataSourcePersistService(); @@ -210,11 +210,10 @@ private Map buildTargetTableSchemaMap(final Map sourceTables = new LinkedList<>(); - getJobConfiguration(jobConfigPOJO).getJobShardingDataNodes().forEach(each -> each.getEntries().forEach(entry -> entry.getDataNodes() - .forEach(dataNode -> sourceTables.add(DataNodeUtils.formatWithSchema(dataNode))))); + new PipelineJobManager(this).getJobConfiguration(jobId).getJobShardingDataNodes() + .forEach(each -> each.getEntries().forEach(entry -> entry.getDataNodes().forEach(dataNode -> sourceTables.add(DataNodeUtils.formatWithSchema(dataNode))))); return new TableBasedPipelineJobInfo(jobMetaData, String.join(",", sourceTables)); } @@ -227,8 +226,8 @@ public void extendYamlJobConfiguration(final PipelineContextKey contextKey, fina } @Override - public MigrationJobConfiguration getJobConfiguration(final JobConfigurationPOJO jobConfigPOJO) { - return new 
YamlMigrationJobConfigurationSwapper().swapToObject(jobConfigPOJO.getJobParameter()); + public YamlMigrationJobConfigurationSwapper getYamlJobConfigurationSwapper() { + return new YamlMigrationJobConfigurationSwapper(); } @Override @@ -277,7 +276,7 @@ private ImporterConfiguration buildImporterConfiguration(final MigrationJobConfi @Override public MigrationProcessContext buildPipelineProcessContext(final PipelineJobConfiguration pipelineJobConfig) { - PipelineProcessConfiguration processConfig = showProcessConfiguration(PipelineJobIdUtils.parseContextKey(pipelineJobConfig.getJobId())); + PipelineProcessConfiguration processConfig = new InventoryIncrementalJobManager(this).showProcessConfiguration(PipelineJobIdUtils.parseContextKey(pipelineJobConfig.getJobId())); return new MigrationProcessContext(pipelineJobConfig.getJobId(), processConfig); } @@ -323,7 +322,7 @@ private void dropCheckJobs(final String jobId) { } private void cleanTempTableOnRollback(final String jobId) throws SQLException { - MigrationJobConfiguration jobConfig = getJobConfiguration(PipelineJobIdUtils.getElasticJobConfigurationPOJO(jobId)); + MigrationJobConfiguration jobConfig = new PipelineJobManager(this).getJobConfiguration(jobId); PipelineCommonSQLBuilder pipelineSQLBuilder = new PipelineCommonSQLBuilder(jobConfig.getTargetDatabaseType()); TableAndSchemaNameMapper mapping = new TableAndSchemaNameMapper(jobConfig.getTargetTableSchemaMap()); try ( @@ -347,7 +346,7 @@ public void commit(final String jobId) { PipelineJobManager jobManager = new PipelineJobManager(this); jobManager.stop(jobId); dropCheckJobs(jobId); - MigrationJobConfiguration jobConfig = getJobConfiguration(PipelineJobIdUtils.getElasticJobConfigurationPOJO(jobId)); + MigrationJobConfiguration jobConfig = new PipelineJobManager(this).getJobConfiguration(jobId); refreshTableMetadata(jobId, jobConfig.getTargetDatabaseName()); jobManager.drop(jobId); log.info("Commit cost {} ms", System.currentTimeMillis() - startTimeMillis); @@ 
-445,7 +444,7 @@ public void refreshTableMetadata(final String jobId, final String databaseName) } @Override - public Class getPipelineJobClass() { + public Class getJobClass() { return MigrationJob.class; } diff --git a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/check/consistency/MigrationDataConsistencyChecker.java b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/check/consistency/MigrationDataConsistencyChecker.java index addf2dee11af1..c189075c554b0 100644 --- a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/check/consistency/MigrationDataConsistencyChecker.java +++ b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/check/consistency/MigrationDataConsistencyChecker.java @@ -41,6 +41,7 @@ import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableInventoryCheckParameter; import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableInventoryChecker; import org.apache.shardingsphere.data.pipeline.core.exception.data.PipelineTableDataConsistencyCheckLoadingFailedException; +import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobManager; import org.apache.shardingsphere.data.pipeline.scenario.migration.api.impl.MigrationJobAPI; import org.apache.shardingsphere.data.pipeline.scenario.migration.config.MigrationJobConfiguration; import org.apache.shardingsphere.data.pipeline.common.spi.algorithm.JobRateLimitAlgorithm; @@ -98,7 +99,7 @@ public Map check(final String algorithm } private long getRecordsCount() { - Map jobProgress = new MigrationJobAPI().getJobProgress(jobConfig); + Map jobProgress = new InventoryIncrementalJobManager(new MigrationJobAPI()).getJobProgress(jobConfig); return 
jobProgress.values().stream().filter(Objects::nonNull).mapToLong(InventoryIncrementalJobItemProgress::getProcessedRecordsCount).sum(); } diff --git a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/prepare/MigrationJobPreparer.java b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/prepare/MigrationJobPreparer.java index e6ef884220a5e..358f340518048 100644 --- a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/prepare/MigrationJobPreparer.java +++ b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/prepare/MigrationJobPreparer.java @@ -45,6 +45,8 @@ import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.InventoryDumperContext; import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobCenter; import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils; +import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobManager; +import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobItemManager; import org.apache.shardingsphere.data.pipeline.core.preparer.InventoryTaskSplitter; import org.apache.shardingsphere.data.pipeline.core.preparer.PipelineJobPreparerUtils; import org.apache.shardingsphere.data.pipeline.core.preparer.datasource.PrepareTargetSchemasParameter; @@ -81,6 +83,10 @@ public final class MigrationJobPreparer { private final MigrationJobAPI jobAPI = new MigrationJobAPI(); + private final PipelineJobItemManager jobItemManager = new PipelineJobItemManager<>(jobAPI.getYamlJobItemProgressSwapper()); + + private final InventoryIncrementalJobManager inventoryIncrementalJobManager = new InventoryIncrementalJobManager(jobAPI); + /** * Do prepare work. 
* @@ -101,9 +107,12 @@ public void prepare(final MigrationJobItemContext jobItemContext) throws SQLExce PipelineJobCenter.stop(jobItemContext.getJobId()); return; } - prepareIncremental(jobItemContext); + boolean isIncrementalSupported = PipelineJobPreparerUtils.isIncrementalSupported(jobItemContext.getJobConfig().getSourceDatabaseType()); + if (isIncrementalSupported) { + prepareIncremental(jobItemContext); + } initInventoryTasks(jobItemContext); - if (PipelineJobPreparerUtils.isIncrementalSupported(jobItemContext.getJobConfig().getSourceDatabaseType())) { + if (isIncrementalSupported) { initIncrementalTasks(jobItemContext); if (jobItemContext.isStopping()) { PipelineJobCenter.stop(jobItemContext.getJobId()); @@ -119,20 +128,20 @@ private void prepareAndCheckTargetWithLock(final MigrationJobItemContext jobItem MigrationJobConfiguration jobConfig = jobItemContext.getJobConfig(); String jobId = jobConfig.getJobId(); LockContext lockContext = PipelineContextManager.getContext(PipelineJobIdUtils.parseContextKey(jobId)).getContextManager().getInstanceContext().getLockContext(); - if (!jobAPI.getJobItemProgress(jobId, jobItemContext.getShardingItem()).isPresent()) { - jobAPI.persistJobItemProgress(jobItemContext); + if (!jobItemManager.getProgress(jobId, jobItemContext.getShardingItem()).isPresent()) { + jobItemManager.persistProgress(jobItemContext); } LockDefinition lockDefinition = new GlobalLockDefinition(String.format(GlobalLockNames.PREPARE.getLockName(), jobConfig.getJobId())); long startTimeMillis = System.currentTimeMillis(); if (lockContext.tryLock(lockDefinition, 600000)) { log.info("try lock success, jobId={}, shardingItem={}, cost {} ms", jobId, jobItemContext.getShardingItem(), System.currentTimeMillis() - startTimeMillis); try { - JobOffsetInfo offsetInfo = jobAPI.getJobOffsetInfo(jobId); + JobOffsetInfo offsetInfo = inventoryIncrementalJobManager.getJobOffsetInfo(jobId); if (!offsetInfo.isTargetSchemaTableCreated()) { 
jobItemContext.setStatus(JobStatus.PREPARING); - jobAPI.updateJobItemStatus(jobId, jobItemContext.getShardingItem(), JobStatus.PREPARING); + jobItemManager.updateStatus(jobId, jobItemContext.getShardingItem(), JobStatus.PREPARING); prepareAndCheckTarget(jobItemContext); - jobAPI.persistJobOffsetInfo(jobId, new JobOffsetInfo(true)); + inventoryIncrementalJobManager.persistJobOffsetInfo(jobId, new JobOffsetInfo(true)); } } finally { log.info("unlock, jobId={}, shardingItem={}, cost {} ms", jobId, jobItemContext.getShardingItem(), System.currentTimeMillis() - startTimeMillis); diff --git a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/yaml/job/YamlMigrationJobConfigurationSwapper.java b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/yaml/job/YamlMigrationJobConfigurationSwapper.java index c5dce6c130669..e7e123319b2b0 100644 --- a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/yaml/job/YamlMigrationJobConfigurationSwapper.java +++ b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/yaml/job/YamlMigrationJobConfigurationSwapper.java @@ -17,13 +17,13 @@ package org.apache.shardingsphere.data.pipeline.yaml.job; -import org.apache.shardingsphere.data.pipeline.common.datasource.yaml.YamlPipelineDataSourceConfigurationSwapper; import org.apache.shardingsphere.data.pipeline.common.datanode.JobDataNodeLine; +import org.apache.shardingsphere.data.pipeline.common.datasource.yaml.YamlPipelineDataSourceConfigurationSwapper; +import org.apache.shardingsphere.data.pipeline.core.job.yaml.YamlPipelineJobConfigurationSwapper; import org.apache.shardingsphere.data.pipeline.scenario.migration.config.MigrationJobConfiguration; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import 
org.apache.shardingsphere.infra.util.yaml.YamlEngine; -import org.apache.shardingsphere.infra.util.yaml.swapper.YamlConfigurationSwapper; import java.util.LinkedHashMap; import java.util.Map.Entry; @@ -32,7 +32,7 @@ /** * YAML migration job configuration swapper. */ -public final class YamlMigrationJobConfigurationSwapper implements YamlConfigurationSwapper { +public final class YamlMigrationJobConfigurationSwapper implements YamlPipelineJobConfigurationSwapper { private final YamlPipelineDataSourceConfigurationSwapper dataSourceConfigSwapper = new YamlPipelineDataSourceConfigurationSwapper(); @@ -67,12 +67,7 @@ public MigrationJobConfiguration swapToObject(final YamlMigrationJobConfiguratio yamlConfig.getConcurrency(), yamlConfig.getRetryTimes()); } - /** - * Swap to migration job configuration from YAML text. - * - * @param jobParam job parameter YAML text - * @return migration job configuration - */ + @Override public MigrationJobConfiguration swapToObject(final String jobParam) { return swapToObject(YamlEngine.unmarshal(jobParam, YamlMigrationJobConfiguration.class, true)); } diff --git a/kernel/traffic/distsql/handler/src/test/java/org/apache/shardingsphere/traffic/distsql/handler/query/ShowTrafficRuleExecutorTest.java b/kernel/traffic/distsql/handler/src/test/java/org/apache/shardingsphere/traffic/distsql/handler/query/ShowTrafficRuleExecutorTest.java index 95f8d5aff0e99..0fc8570d3f4ae 100644 --- a/kernel/traffic/distsql/handler/src/test/java/org/apache/shardingsphere/traffic/distsql/handler/query/ShowTrafficRuleExecutorTest.java +++ b/kernel/traffic/distsql/handler/src/test/java/org/apache/shardingsphere/traffic/distsql/handler/query/ShowTrafficRuleExecutorTest.java @@ -58,14 +58,14 @@ void assertExecute() { assertThat(row.getCell(3), is("SQL_MATCH")); assertThat(row.getCell(4), is("{\"sql\":\"select * from t_order\"}")); assertThat(row.getCell(5), is("RANDOM")); - assertThat(row.getCell(6), is("{}")); + assertThat(row.getCell(6), is("")); row = 
iterator.next(); assertThat(row.getCell(1), is("rule_name_2")); assertThat(row.getCell(2), is("oltp")); assertThat(row.getCell(3), is("SQL_HINT")); - assertThat(row.getCell(4), is("{}")); + assertThat(row.getCell(4), is("")); assertThat(row.getCell(5), is("ROBIN")); - assertThat(row.getCell(6), is("{}")); + assertThat(row.getCell(6), is("")); } @Test diff --git a/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/DDLStatement.g4 b/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/DDLStatement.g4 index c9cddf286cdcd..a0620e1440193 100644 --- a/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/DDLStatement.g4 +++ b/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/DDLStatement.g4 @@ -22,8 +22,8 @@ import BaseRule, DCLStatement, DMLStatement; createView : CREATE (OR REPLACE)? (NO? FORCE)? (EDITIONING | EDITIONABLE EDITIONING? | NONEDITIONABLE)? VIEW viewName ( SHARING EQ_ (METADATA | DATA | EXTENDED DATA | NONE))? - ( LP_ (alias (VISIBLE | INVISIBLE)? inlineConstraint* (COMMA_ alias (VISIBLE | INVISIBLE)? inlineConstraint*)* - | outOfLineConstraint) RP_ | objectViewClause | xmlTypeViewClause)? + ( LP_ ((alias (VISIBLE | INVISIBLE)? inlineConstraint* | outOfLineConstraint) (COMMA_ (alias (VISIBLE | INVISIBLE)? inlineConstraint* | outOfLineConstraint))*) RP_ + | objectViewClause | xmlTypeViewClause)? ( DEFAULT COLLATION collationName)? (BEQUEATH (CURRENT_USER | DEFINER))? AS select subqueryRestrictionClause? ( CONTAINER_MAP | CONTAINERS_DEFAULT)? 
; diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/common/checker/ShardingRuleConfigurationImportChecker.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/common/checker/ShardingRuleConfigurationImportChecker.java index 6d95d37e16f05..2dc87ee451afe 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/common/checker/ShardingRuleConfigurationImportChecker.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/common/checker/ShardingRuleConfigurationImportChecker.java @@ -21,15 +21,15 @@ import org.apache.shardingsphere.distsql.handler.exception.storageunit.MissingRequiredStorageUnitsException; import org.apache.shardingsphere.infra.config.algorithm.AlgorithmConfiguration; import org.apache.shardingsphere.infra.datanode.DataNode; -import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; -import org.apache.shardingsphere.infra.rule.identifier.type.DataSourceContainedRule; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.expr.core.InlineExpressionParserFactory; +import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.rule.identifier.type.DataSourceContainedRule; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; +import org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm; import org.apache.shardingsphere.sharding.api.config.ShardingRuleConfiguration; import org.apache.shardingsphere.sharding.api.config.rule.ShardingAutoTableRuleConfiguration; import org.apache.shardingsphere.sharding.api.config.rule.ShardingTableRuleConfiguration; -import org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm; import org.apache.shardingsphere.sharding.spi.ShardingAlgorithm; import 
java.util.Collection; diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ShowMigrationRuleExecutor.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ShowMigrationRuleExecutor.java index e592dd7ee12fb..d9e5391261367 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ShowMigrationRuleExecutor.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ShowMigrationRuleExecutor.java @@ -20,6 +20,7 @@ import org.apache.shardingsphere.data.pipeline.common.config.process.PipelineProcessConfiguration; import org.apache.shardingsphere.data.pipeline.common.context.PipelineContextKey; import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobAPI; +import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobManager; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobAPI; import org.apache.shardingsphere.distsql.handler.ral.query.QueryableRALExecutor; import org.apache.shardingsphere.distsql.statement.ral.queryable.ShowMigrationRuleStatement; @@ -39,7 +40,7 @@ public final class ShowMigrationRuleExecutor implements QueryableRALExecutor getRows(final ShowMigrationRuleStatement sqlStatement) { - PipelineProcessConfiguration processConfig = ((InventoryIncrementalJobAPI) TypedSPILoader.getService(PipelineJobAPI.class, "MIGRATION")) + PipelineProcessConfiguration processConfig = new InventoryIncrementalJobManager((InventoryIncrementalJobAPI) TypedSPILoader.getService(PipelineJobAPI.class, "MIGRATION")) .showProcessConfiguration(new PipelineContextKey(InstanceType.PROXY)); Collection result = new LinkedList<>(); result.add(new LocalDataQueryResultRow(getString(processConfig.getRead()), getString(processConfig.getWrite()), 
getString(processConfig.getStreamChannel()))); diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/AlterInventoryIncrementalRuleUpdater.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/AlterInventoryIncrementalRuleUpdater.java index 4931418c4994a..52648f2cc95c3 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/AlterInventoryIncrementalRuleUpdater.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/AlterInventoryIncrementalRuleUpdater.java @@ -20,6 +20,7 @@ import org.apache.shardingsphere.data.pipeline.common.config.process.PipelineProcessConfiguration; import org.apache.shardingsphere.data.pipeline.common.context.PipelineContextKey; import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobAPI; +import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobManager; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobAPI; import org.apache.shardingsphere.distsql.handler.ral.update.RALUpdater; import org.apache.shardingsphere.distsql.statement.ral.updatable.AlterInventoryIncrementalRuleStatement; @@ -34,9 +35,9 @@ public final class AlterInventoryIncrementalRuleUpdater implements RALUpdater - + diff --git a/test/e2e/sql/src/test/resources/cases/rdl/rdl-integration-register.xml b/test/e2e/sql/src/test/resources/cases/rdl/rdl-integration-register.xml index 1f3d6fc76a683..2be60a2721b85 100644 --- a/test/e2e/sql/src/test/resources/cases/rdl/rdl-integration-register.xml +++ b/test/e2e/sql/src/test/resources/cases/rdl/rdl-integration-register.xml @@ -16,7 +16,7 @@ --> - + diff --git a/test/e2e/sql/src/test/resources/env/scenario/db/proxy/conf/mysql/config-db.yaml b/test/e2e/sql/src/test/resources/env/scenario/db/proxy/conf/mysql/config-db.yaml 
index 1e23711f3cb38..168704b0dbf22 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/db/proxy/conf/mysql/config-db.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/db/proxy/conf/mysql/config-db.yaml @@ -19,7 +19,7 @@ databaseName: db dataSources: ds_0: - url: jdbc:mysql://mysql.db.host:3306/db_0?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.db.host:3306/db_0?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -28,7 +28,7 @@ dataSources: maxPoolSize: 50 minPoolSize: 2 ds_1: - url: jdbc:mysql://mysql.db.host:3306/db_1?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.db.host:3306/db_1?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -37,7 +37,7 @@ dataSources: maxPoolSize: 50 minPoolSize: 2 ds_2: - url: jdbc:mysql://mysql.db.host:3306/db_2?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.db.host:3306/db_2?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -46,7 +46,7 @@ dataSources: maxPoolSize: 50 minPoolSize: 2 ds_3: - url: jdbc:mysql://mysql.db.host:3306/db_3?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.db.host:3306/db_3?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -55,7 +55,7 @@ dataSources: maxPoolSize: 50 minPoolSize: 2 ds_4: - url: jdbc:mysql://mysql.db.host:3306/db_4?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.db.host:3306/db_4?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -64,7 +64,7 @@ dataSources: maxPoolSize: 50 minPoolSize: 2 ds_5: - url: jdbc:mysql://mysql.db.host:3306/db_5?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: 
jdbc:mysql://mysql.db.host:3306/db_5?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -73,7 +73,7 @@ dataSources: maxPoolSize: 50 minPoolSize: 2 ds_6: - url: jdbc:mysql://mysql.db.host:3306/db_6?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.db.host:3306/db_6?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -82,7 +82,7 @@ dataSources: maxPoolSize: 50 minPoolSize: 2 ds_7: - url: jdbc:mysql://mysql.db.host:3306/db_7?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.db.host:3306/db_7?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -91,7 +91,7 @@ dataSources: maxPoolSize: 50 minPoolSize: 2 ds_8: - url: jdbc:mysql://mysql.db.host:3306/db_8?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.db.host:3306/db_8?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -100,7 +100,7 @@ dataSources: maxPoolSize: 50 minPoolSize: 2 ds_9: - url: jdbc:mysql://mysql.db.host:3306/db_9?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.db.host:3306/db_9?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/e2e/sql/src/test/resources/env/scenario/dbtbl_with_readwrite_splitting/proxy/conf/mysql/config-dbtbl-with-readwrite-splitting.yaml b/test/e2e/sql/src/test/resources/env/scenario/dbtbl_with_readwrite_splitting/proxy/conf/mysql/config-dbtbl-with-readwrite-splitting.yaml index 27dbd5211de6d..3d1b42a801f87 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/dbtbl_with_readwrite_splitting/proxy/conf/mysql/config-dbtbl-with-readwrite-splitting.yaml +++ 
b/test/e2e/sql/src/test/resources/env/scenario/dbtbl_with_readwrite_splitting/proxy/conf/mysql/config-dbtbl-with-readwrite-splitting.yaml @@ -19,7 +19,7 @@ databaseName: dbtbl_with_readwrite_splitting dataSources: write_ds_0: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/write_ds_0?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/write_ds_0?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -28,7 +28,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 write_ds_1: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/write_ds_1?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/write_ds_1?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -37,7 +37,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 write_ds_2: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/write_ds_2?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/write_ds_2?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -46,7 +46,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 write_ds_3: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/write_ds_3?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/write_ds_3?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -55,7 +55,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 write_ds_4: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/write_ds_4?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: 
jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/write_ds_4?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -64,7 +64,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 write_ds_5: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/write_ds_5?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/write_ds_5?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -73,7 +73,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 write_ds_6: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/write_ds_6?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/write_ds_6?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -82,7 +82,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 write_ds_7: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/write_ds_7?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/write_ds_7?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -91,7 +91,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 write_ds_8: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/write_ds_8?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/write_ds_8?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -100,7 +100,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 write_ds_9: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/write_ds_9?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: 
jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/write_ds_9?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -109,7 +109,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_ds_0: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/read_ds_0?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/read_ds_0?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -118,7 +118,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_ds_1: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/read_ds_1?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/read_ds_1?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -127,7 +127,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_ds_2: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/read_ds_2?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/read_ds_2?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -136,7 +136,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_ds_3: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/read_ds_3?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/read_ds_3?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -145,7 +145,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_ds_4: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/read_ds_4?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: 
jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/read_ds_4?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -154,7 +154,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_ds_5: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/read_ds_5?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/read_ds_5?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -163,7 +163,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_ds_6: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/read_ds_6?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/read_ds_6?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -172,7 +172,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_ds_7: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/read_ds_7?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/read_ds_7?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -181,7 +181,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_ds_8: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/read_ds_8?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/read_ds_8?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -190,7 +190,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_ds_9: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/read_ds_9?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: 
jdbc:mysql://mysql.dbtbl_with_readwrite_splitting.host:3306/read_ds_9?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/e2e/sql/src/test/resources/env/scenario/dbtbl_with_readwrite_splitting_and_encrypt/proxy/conf/mysql/config-dbtbl-with-readwrite-splitting-and-encrypt.yaml b/test/e2e/sql/src/test/resources/env/scenario/dbtbl_with_readwrite_splitting_and_encrypt/proxy/conf/mysql/config-dbtbl-with-readwrite-splitting-and-encrypt.yaml index d4adb8cffe92f..4405efa9cdd16 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/dbtbl_with_readwrite_splitting_and_encrypt/proxy/conf/mysql/config-dbtbl-with-readwrite-splitting-and-encrypt.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/dbtbl_with_readwrite_splitting_and_encrypt/proxy/conf/mysql/config-dbtbl-with-readwrite-splitting-and-encrypt.yaml @@ -19,7 +19,7 @@ databaseName: dbtbl_with_readwrite_splitting_and_encrypt dataSources: encrypt_write_ds_0: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_write_ds_0?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_write_ds_0?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -28,7 +28,7 @@ dataSources: maxPoolSize: 20 minPoolSize: 2 encrypt_write_ds_1: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_write_ds_1?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_write_ds_1?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -37,7 +37,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_write_ds_2: - url: 
jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_write_ds_2?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_write_ds_2?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -46,7 +46,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_write_ds_3: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_write_ds_3?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_write_ds_3?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -55,7 +55,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_write_ds_4: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_write_ds_4?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_write_ds_4?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -64,7 +64,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_write_ds_5: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_write_ds_5?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_write_ds_5?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -73,7 +73,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_write_ds_6: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_write_ds_6?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: 
jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_write_ds_6?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -82,7 +82,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_write_ds_7: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_write_ds_7?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_write_ds_7?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -91,7 +91,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_write_ds_8: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_write_ds_8?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_write_ds_8?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -100,7 +100,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_write_ds_9: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_write_ds_9?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_write_ds_9?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -109,7 +109,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_read_ds_0: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_read_ds_0?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_read_ds_0?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -118,7 
+118,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_read_ds_1: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_read_ds_1?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_read_ds_1?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -127,7 +127,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_read_ds_2: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_read_ds_2?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_read_ds_2?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -136,7 +136,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_read_ds_3: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_read_ds_3?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_read_ds_3?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -145,7 +145,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_read_ds_4: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_read_ds_4?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_read_ds_4?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -154,7 +154,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_read_ds_5: - url: 
jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_read_ds_5?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_read_ds_5?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -163,7 +163,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_read_ds_6: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_read_ds_6?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_read_ds_6?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -172,7 +172,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_read_ds_7: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_read_ds_7?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_read_ds_7?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -181,7 +181,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_read_ds_8: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_read_ds_8?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_read_ds_8?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -190,7 +190,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_read_ds_9: - url: jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_read_ds_9?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: 
jdbc:mysql://mysql.dbtbl_with_readwrite_splitting_and_encrypt.host:3306/encrypt_read_ds_9?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/e2e/sql/src/test/resources/env/scenario/empty_rules/proxy/conf/mysql/config-empty-rules.yaml b/test/e2e/sql/src/test/resources/env/scenario/empty_rules/proxy/conf/mysql/config-empty-rules.yaml index 2f4946c4b7d58..9f54eda9c6d19 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/empty_rules/proxy/conf/mysql/config-empty-rules.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/empty_rules/proxy/conf/mysql/config-empty-rules.yaml @@ -19,7 +19,7 @@ databaseName: empty_rules dataSources: write_ds_0: - url: jdbc:mysql://mysql.empty_rules.host:3306/write_ds_0?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.empty_rules.host:3306/write_ds_0?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -28,7 +28,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 write_ds_1: - url: jdbc:mysql://mysql.empty_rules.host:3306/write_ds_1?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.empty_rules.host:3306/write_ds_1?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -37,7 +37,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 write_ds_2: - url: jdbc:mysql://mysql.empty_rules.host:3306/write_ds_2?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.empty_rules.host:3306/write_ds_2?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -46,7 +46,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 write_ds_3: - url: jdbc:mysql://mysql.empty_rules.host:3306/write_ds_3?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: 
jdbc:mysql://mysql.empty_rules.host:3306/write_ds_3?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -55,7 +55,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 write_ds_4: - url: jdbc:mysql://mysql.empty_rules.host:3306/write_ds_4?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.empty_rules.host:3306/write_ds_4?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -64,7 +64,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 write_ds_5: - url: jdbc:mysql://mysql.empty_rules.host:3306/write_ds_5?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.empty_rules.host:3306/write_ds_5?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -73,7 +73,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 write_ds_6: - url: jdbc:mysql://mysql.empty_rules.host:3306/write_ds_6?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.empty_rules.host:3306/write_ds_6?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -82,7 +82,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 write_ds_7: - url: jdbc:mysql://mysql.empty_rules.host:3306/write_ds_7?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.empty_rules.host:3306/write_ds_7?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -91,7 +91,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 write_ds_8: - url: jdbc:mysql://mysql.empty_rules.host:3306/write_ds_8?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.empty_rules.host:3306/write_ds_8?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -100,7 +100,7 @@ dataSources: 
maxPoolSize: 2 minPoolSize: 2 write_ds_9: - url: jdbc:mysql://mysql.empty_rules.host:3306/write_ds_9?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.empty_rules.host:3306/write_ds_9?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -109,7 +109,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_ds_0: - url: jdbc:mysql://mysql.empty_rules.host:3306/read_ds_0?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.empty_rules.host:3306/read_ds_0?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -118,7 +118,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_ds_1: - url: jdbc:mysql://mysql.empty_rules.host:3306/read_ds_1?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.empty_rules.host:3306/read_ds_1?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -127,7 +127,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_ds_2: - url: jdbc:mysql://mysql.empty_rules.host:3306/read_ds_2?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.empty_rules.host:3306/read_ds_2?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -136,7 +136,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_ds_3: - url: jdbc:mysql://mysql.empty_rules.host:3306/read_ds_3?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.empty_rules.host:3306/read_ds_3?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -145,7 +145,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_ds_4: - url: jdbc:mysql://mysql.empty_rules.host:3306/read_ds_4?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: 
jdbc:mysql://mysql.empty_rules.host:3306/read_ds_4?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -154,7 +154,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_ds_5: - url: jdbc:mysql://mysql.empty_rules.host:3306/read_ds_5?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.empty_rules.host:3306/read_ds_5?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -163,7 +163,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_ds_6: - url: jdbc:mysql://mysql.empty_rules.host:3306/read_ds_6?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.empty_rules.host:3306/read_ds_6?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -172,7 +172,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_ds_7: - url: jdbc:mysql://mysql.empty_rules.host:3306/read_ds_7?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.empty_rules.host:3306/read_ds_7?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -181,7 +181,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_ds_8: - url: jdbc:mysql://mysql.empty_rules.host:3306/read_ds_8?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.empty_rules.host:3306/read_ds_8?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -190,7 +190,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_ds_9: - url: jdbc:mysql://mysql.empty_rules.host:3306/read_ds_9?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.empty_rules.host:3306/read_ds_9?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -199,7 +199,7 @@ dataSources: 
maxPoolSize: 2 minPoolSize: 2 rdl_test_0: - url: jdbc:mysql://mysql.empty_rules.host:3306/rdl_test_0?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.empty_rules.host:3306/rdl_test_0?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/e2e/sql/src/test/resources/env/scenario/encrypt/proxy/conf/mysql/config-encrypt.yaml b/test/e2e/sql/src/test/resources/env/scenario/encrypt/proxy/conf/mysql/config-encrypt.yaml index 52e62adb6160e..ee5acbda66843 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/encrypt/proxy/conf/mysql/config-encrypt.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/encrypt/proxy/conf/mysql/config-encrypt.yaml @@ -19,7 +19,7 @@ databaseName: encrypt dataSources: encrypt: - url: jdbc:mysql://mysql.encrypt.host:3306/encrypt?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.encrypt.host:3306/encrypt?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/e2e/sql/src/test/resources/env/scenario/encrypt_and_readwrite_splitting/proxy/conf/mysql/config-encrypt-readwrite-splitting.yaml b/test/e2e/sql/src/test/resources/env/scenario/encrypt_and_readwrite_splitting/proxy/conf/mysql/config-encrypt-readwrite-splitting.yaml index b45a8c04f4a84..c847752a55527 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/encrypt_and_readwrite_splitting/proxy/conf/mysql/config-encrypt-readwrite-splitting.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/encrypt_and_readwrite_splitting/proxy/conf/mysql/config-encrypt-readwrite-splitting.yaml @@ -19,7 +19,7 @@ databaseName: encrypt_and_readwrite_splitting dataSources: encrypt_write_ds: - url: jdbc:mysql://mysql.encrypt_and_readwrite_splitting.host:3306/encrypt_write_ds?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: 
jdbc:mysql://mysql.encrypt_and_readwrite_splitting.host:3306/encrypt_write_ds?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -28,7 +28,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_read_ds: - url: jdbc:mysql://mysql.encrypt_and_readwrite_splitting.host:3306/encrypt_read_ds?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.encrypt_and_readwrite_splitting.host:3306/encrypt_read_ds?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/e2e/sql/src/test/resources/env/scenario/encrypt_shadow/proxy/conf/mysql/config-encrypt-shadow.yaml b/test/e2e/sql/src/test/resources/env/scenario/encrypt_shadow/proxy/conf/mysql/config-encrypt-shadow.yaml index a8f4c5ebf9ebd..3d6804693bc6c 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/encrypt_shadow/proxy/conf/mysql/config-encrypt-shadow.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/encrypt_shadow/proxy/conf/mysql/config-encrypt-shadow.yaml @@ -19,7 +19,7 @@ databaseName: encrypt_shadow dataSources: db: - url: jdbc:mysql://mysql.encrypt_shadow.host:3306/db?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.encrypt_shadow.host:3306/db?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -28,7 +28,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_shadow_db: - url: jdbc:mysql://mysql.encrypt_shadow.host:3306/encrypt_shadow_db?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.encrypt_shadow.host:3306/encrypt_shadow_db?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/e2e/sql/src/test/resources/env/scenario/mask/proxy/conf/mysql/config-mask.yaml 
b/test/e2e/sql/src/test/resources/env/scenario/mask/proxy/conf/mysql/config-mask.yaml index cb422ba05c57a..af58ef811b77f 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/mask/proxy/conf/mysql/config-mask.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/mask/proxy/conf/mysql/config-mask.yaml @@ -19,7 +19,7 @@ databaseName: mask dataSources: mask: - url: jdbc:mysql://mysql.mask.host:3306/mask?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask.host:3306/mask?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/e2e/sql/src/test/resources/env/scenario/mask_encrypt/proxy/conf/mysql/config-mask-encrypt.yaml b/test/e2e/sql/src/test/resources/env/scenario/mask_encrypt/proxy/conf/mysql/config-mask-encrypt.yaml index 9ab9d295c2166..9c2c17c503e37 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/mask_encrypt/proxy/conf/mysql/config-mask-encrypt.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/mask_encrypt/proxy/conf/mysql/config-mask-encrypt.yaml @@ -19,7 +19,7 @@ databaseName: mask_encrypt dataSources: mask_encrypt: - url: jdbc:mysql://mysql.mask_encrypt.host:3306/mask_encrypt?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask_encrypt.host:3306/mask_encrypt?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/e2e/sql/src/test/resources/env/scenario/mask_encrypt_sharding/proxy/conf/mysql/config-mask-encrypt-sharding.yaml b/test/e2e/sql/src/test/resources/env/scenario/mask_encrypt_sharding/proxy/conf/mysql/config-mask-encrypt-sharding.yaml index 6a8d7427bb806..a051358fb7a93 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/mask_encrypt_sharding/proxy/conf/mysql/config-mask-encrypt-sharding.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/mask_encrypt_sharding/proxy/conf/mysql/config-mask-encrypt-sharding.yaml 
@@ -19,7 +19,7 @@ databaseName: mask_encrypt_sharding dataSources: mask_encrypt_ds_0: - url: jdbc:mysql://mysql.mask_encrypt_sharding.host:3306/mask_encrypt_ds_0?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask_encrypt_sharding.host:3306/mask_encrypt_ds_0?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -28,7 +28,7 @@ dataSources: maxPoolSize: 10 minPoolSize: 2 mask_encrypt_ds_1: - url: jdbc:mysql://mysql.mask_encrypt_sharding.host:3306/mask_encrypt_ds_1?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask_encrypt_sharding.host:3306/mask_encrypt_ds_1?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -37,7 +37,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 mask_encrypt_ds_2: - url: jdbc:mysql://mysql.mask_encrypt_sharding.host:3306/mask_encrypt_ds_2?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask_encrypt_sharding.host:3306/mask_encrypt_ds_2?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -46,7 +46,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 mask_encrypt_ds_3: - url: jdbc:mysql://mysql.mask_encrypt_sharding.host:3306/mask_encrypt_ds_3?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask_encrypt_sharding.host:3306/mask_encrypt_ds_3?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -55,7 +55,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 mask_encrypt_ds_4: - url: jdbc:mysql://mysql.mask_encrypt_sharding.host:3306/mask_encrypt_ds_4?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask_encrypt_sharding.host:3306/mask_encrypt_ds_4?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 
connectionTimeoutMilliseconds: 30000 @@ -64,7 +64,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 mask_encrypt_ds_5: - url: jdbc:mysql://mysql.mask_encrypt_sharding.host:3306/mask_encrypt_ds_5?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask_encrypt_sharding.host:3306/mask_encrypt_ds_5?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -73,7 +73,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 mask_encrypt_ds_6: - url: jdbc:mysql://mysql.mask_encrypt_sharding.host:3306/mask_encrypt_ds_6?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask_encrypt_sharding.host:3306/mask_encrypt_ds_6?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -82,7 +82,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 mask_encrypt_ds_7: - url: jdbc:mysql://mysql.mask_encrypt_sharding.host:3306/mask_encrypt_ds_7?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask_encrypt_sharding.host:3306/mask_encrypt_ds_7?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -91,7 +91,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 mask_encrypt_ds_8: - url: jdbc:mysql://mysql.mask_encrypt_sharding.host:3306/mask_encrypt_ds_8?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask_encrypt_sharding.host:3306/mask_encrypt_ds_8?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -100,7 +100,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 mask_encrypt_ds_9: - url: jdbc:mysql://mysql.mask_encrypt_sharding.host:3306/mask_encrypt_ds_9?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask_encrypt_sharding.host:3306/mask_encrypt_ds_9?useSSL=false&characterEncoding=utf-8 username: test_user password: 
Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/e2e/sql/src/test/resources/env/scenario/mask_sharding/proxy/conf/mysql/config-mask-sharding.yaml b/test/e2e/sql/src/test/resources/env/scenario/mask_sharding/proxy/conf/mysql/config-mask-sharding.yaml index 7e55eae321d35..443762d989e86 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/mask_sharding/proxy/conf/mysql/config-mask-sharding.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/mask_sharding/proxy/conf/mysql/config-mask-sharding.yaml @@ -19,7 +19,7 @@ databaseName: mask_sharding dataSources: mask_ds_0: - url: jdbc:mysql://mysql.mask_sharding.host:3306/mask_ds_0?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask_sharding.host:3306/mask_ds_0?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -28,7 +28,7 @@ dataSources: maxPoolSize: 10 minPoolSize: 2 mask_ds_1: - url: jdbc:mysql://mysql.mask_sharding.host:3306/mask_ds_1?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask_sharding.host:3306/mask_ds_1?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -37,7 +37,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 mask_ds_2: - url: jdbc:mysql://mysql.mask_sharding.host:3306/mask_ds_2?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask_sharding.host:3306/mask_ds_2?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -46,7 +46,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 mask_ds_3: - url: jdbc:mysql://mysql.mask_sharding.host:3306/mask_ds_3?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask_sharding.host:3306/mask_ds_3?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -55,7 +55,7 @@ dataSources: 
maxPoolSize: 2 minPoolSize: 2 mask_ds_4: - url: jdbc:mysql://mysql.mask_sharding.host:3306/mask_ds_4?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask_sharding.host:3306/mask_ds_4?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -64,7 +64,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 mask_ds_5: - url: jdbc:mysql://mysql.mask_sharding.host:3306/mask_ds_5?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask_sharding.host:3306/mask_ds_5?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -73,7 +73,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 mask_ds_6: - url: jdbc:mysql://mysql.mask_sharding.host:3306/mask_ds_6?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask_sharding.host:3306/mask_ds_6?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -82,7 +82,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 mask_ds_7: - url: jdbc:mysql://mysql.mask_sharding.host:3306/mask_ds_7?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask_sharding.host:3306/mask_ds_7?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -91,7 +91,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 mask_ds_8: - url: jdbc:mysql://mysql.mask_sharding.host:3306/mask_ds_8?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.mask_sharding.host:3306/mask_ds_8?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -100,7 +100,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 mask_ds_9: - url: jdbc:mysql://mysql.mask_sharding.host:3306/mask_ds_9?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: 
jdbc:mysql://mysql.mask_sharding.host:3306/mask_ds_9?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/e2e/sql/src/test/resources/env/scenario/passthrough/proxy/conf/mysql/config-passthrough.yaml b/test/e2e/sql/src/test/resources/env/scenario/passthrough/proxy/conf/mysql/config-passthrough.yaml index f0b6f0ddbbe3a..97ab5b432a5bc 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/passthrough/proxy/conf/mysql/config-passthrough.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/passthrough/proxy/conf/mysql/config-passthrough.yaml @@ -19,7 +19,7 @@ databaseName: passthrough dataSources: passthrough: - url: jdbc:mysql://mysql.passthrough.host:3306/passthrough?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.passthrough.host:3306/passthrough?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/e2e/sql/src/test/resources/env/scenario/rdl_empty_rules/proxy/conf/mysql/config-rdl-empty-rules.yaml b/test/e2e/sql/src/test/resources/env/scenario/rdl_empty_rules/proxy/conf/mysql/config-rdl-empty-rules.yaml index bc86767055020..c25fafd5ae906 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/rdl_empty_rules/proxy/conf/mysql/config-rdl-empty-rules.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/rdl_empty_rules/proxy/conf/mysql/config-rdl-empty-rules.yaml @@ -19,7 +19,7 @@ databaseName: rdl_empty_rules dataSources: ds_0: - url: jdbc:mysql://mysql.rdl_empty_rules.host:3306/rdl_ds_0?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.rdl_empty_rules.host:3306/rdl_ds_0?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -28,7 +28,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 ds_1: - url: 
jdbc:mysql://mysql.rdl_empty_rules.host:3306/rdl_ds_1?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.rdl_empty_rules.host:3306/rdl_ds_1?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -37,7 +37,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 ds_2: - url: jdbc:mysql://mysql.rdl_empty_rules.host:3306/rdl_ds_2?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.rdl_empty_rules.host:3306/rdl_ds_2?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/e2e/sql/src/test/resources/env/scenario/readwrite_splitting/proxy/conf/mysql/config-readwrite-splitting.yaml b/test/e2e/sql/src/test/resources/env/scenario/readwrite_splitting/proxy/conf/mysql/config-readwrite-splitting.yaml index ab1185be21b73..ac0374abf22cd 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/readwrite_splitting/proxy/conf/mysql/config-readwrite-splitting.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/readwrite_splitting/proxy/conf/mysql/config-readwrite-splitting.yaml @@ -19,7 +19,7 @@ databaseName: readwrite_splitting dataSources: write_ds: - url: jdbc:mysql://mysql.readwrite_splitting.host:3306/write_ds?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.readwrite_splitting.host:3306/write_ds?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -28,7 +28,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_0: - url: jdbc:mysql://mysql.readwrite_splitting.host:3306/read_0?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.readwrite_splitting.host:3306/read_0?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -37,7 +37,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_1: - url: 
jdbc:mysql://mysql.readwrite_splitting.host:3306/read_1?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.readwrite_splitting.host:3306/read_1?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/e2e/sql/src/test/resources/env/scenario/readwrite_splitting_and_shadow/proxy/conf/mysql/config-shadow.yaml b/test/e2e/sql/src/test/resources/env/scenario/readwrite_splitting_and_shadow/proxy/conf/mysql/config-shadow.yaml index 251326bd72574..5542391d940a2 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/readwrite_splitting_and_shadow/proxy/conf/mysql/config-shadow.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/readwrite_splitting_and_shadow/proxy/conf/mysql/config-shadow.yaml @@ -19,7 +19,7 @@ databaseName: readwrite_splitting_and_shadow dataSources: write_db: - url: jdbc:mysql://mysql.readwrite_splitting_and_shadow.host:3306/write_db?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.readwrite_splitting_and_shadow.host:3306/write_db?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -28,7 +28,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 write_db_shadow: - url: jdbc:mysql://mysql.readwrite_splitting_and_shadow.host:3306/write_db_shadow?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.readwrite_splitting_and_shadow.host:3306/write_db_shadow?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -37,7 +37,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_0: - url: jdbc:mysql://mysql.readwrite_splitting_and_shadow.host:3306/read_0?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.readwrite_splitting_and_shadow.host:3306/read_0?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 
connectionTimeoutMilliseconds: 30000 @@ -46,7 +46,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 read_1: - url: jdbc:mysql://mysql.readwrite_splitting_and_shadow.host:3306/read_1?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.readwrite_splitting_and_shadow.host:3306/read_1?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/e2e/sql/src/test/resources/env/scenario/shadow/proxy/conf/mysql/config-shadow.yaml b/test/e2e/sql/src/test/resources/env/scenario/shadow/proxy/conf/mysql/config-shadow.yaml index 7465a2051cef2..799a48594eef9 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/shadow/proxy/conf/mysql/config-shadow.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/shadow/proxy/conf/mysql/config-shadow.yaml @@ -19,7 +19,7 @@ databaseName: shadow dataSources: db: - url: jdbc:mysql://mysql.shadow.host:3306/db?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.shadow.host:3306/db?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -28,7 +28,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db: - url: jdbc:mysql://mysql.shadow.host:3306/shadow_db?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.shadow.host:3306/shadow_db?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/e2e/sql/src/test/resources/env/scenario/sharding_and_encrypt/proxy/conf/mysql/config-sharding-and-encrypt.yaml b/test/e2e/sql/src/test/resources/env/scenario/sharding_and_encrypt/proxy/conf/mysql/config-sharding-and-encrypt.yaml index 9314c93c188c8..5cf05cde67f40 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/sharding_and_encrypt/proxy/conf/mysql/config-sharding-and-encrypt.yaml +++ 
b/test/e2e/sql/src/test/resources/env/scenario/sharding_and_encrypt/proxy/conf/mysql/config-sharding-and-encrypt.yaml @@ -19,7 +19,7 @@ databaseName: sharding_and_encrypt dataSources: encrypt_ds_0: - url: jdbc:mysql://mysql.sharding_and_encrypt.host:3306/encrypt_ds_0?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_encrypt.host:3306/encrypt_ds_0?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -28,7 +28,7 @@ dataSources: maxPoolSize: 20 minPoolSize: 2 encrypt_ds_1: - url: jdbc:mysql://mysql.sharding_and_encrypt.host:3306/encrypt_ds_1?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_encrypt.host:3306/encrypt_ds_1?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -37,7 +37,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_ds_2: - url: jdbc:mysql://mysql.sharding_and_encrypt.host:3306/encrypt_ds_2?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_encrypt.host:3306/encrypt_ds_2?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -46,7 +46,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_ds_3: - url: jdbc:mysql://mysql.sharding_and_encrypt.host:3306/encrypt_ds_3?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_encrypt.host:3306/encrypt_ds_3?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -55,7 +55,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_ds_4: - url: jdbc:mysql://mysql.sharding_and_encrypt.host:3306/encrypt_ds_4?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_encrypt.host:3306/encrypt_ds_4?useSSL=false&characterEncoding=utf-8 username: test_user password: 
Test@123 connectionTimeoutMilliseconds: 30000 @@ -64,7 +64,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_ds_5: - url: jdbc:mysql://mysql.sharding_and_encrypt.host:3306/encrypt_ds_5?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_encrypt.host:3306/encrypt_ds_5?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -73,7 +73,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_ds_6: - url: jdbc:mysql://mysql.sharding_and_encrypt.host:3306/encrypt_ds_6?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_encrypt.host:3306/encrypt_ds_6?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -82,7 +82,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_ds_7: - url: jdbc:mysql://mysql.sharding_and_encrypt.host:3306/encrypt_ds_7?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_encrypt.host:3306/encrypt_ds_7?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -91,7 +91,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_ds_8: - url: jdbc:mysql://mysql.sharding_and_encrypt.host:3306/encrypt_ds_8?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_encrypt.host:3306/encrypt_ds_8?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -100,7 +100,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 encrypt_ds_9: - url: jdbc:mysql://mysql.sharding_and_encrypt.host:3306/encrypt_ds_9?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_encrypt.host:3306/encrypt_ds_9?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git 
a/test/e2e/sql/src/test/resources/env/scenario/sharding_and_shadow/proxy/conf/mysql/config-sharding-and-shadow.yaml b/test/e2e/sql/src/test/resources/env/scenario/sharding_and_shadow/proxy/conf/mysql/config-sharding-and-shadow.yaml index 660cae1aa74c4..317464a52e1bb 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/sharding_and_shadow/proxy/conf/mysql/config-sharding-and-shadow.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/sharding_and_shadow/proxy/conf/mysql/config-sharding-and-shadow.yaml @@ -19,7 +19,7 @@ databaseName: sharding_and_shadow dataSources: db_0: - url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/db_0?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/db_0?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -28,7 +28,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 db_1: - url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/db_1?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/db_1?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -37,7 +37,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 db_2: - url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/db_2?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/db_2?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -46,7 +46,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 db_3: - url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/db_3?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/db_3?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -55,7 +55,7 @@ dataSources: maxPoolSize: 2 
minPoolSize: 2 db_4: - url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/db_4?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/db_4?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -64,7 +64,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 db_5: - url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/db_5?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/db_5?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -73,7 +73,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 db_6: - url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/db_6?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/db_6?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -82,7 +82,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 db_7: - url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/db_7?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/db_7?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -91,7 +91,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 db_8: - url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/db_8?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/db_8?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -100,7 +100,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 db_9: - url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/db_9?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: 
jdbc:mysql://mysql.sharding_and_shadow.host:3306/db_9?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -109,7 +109,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db_0: - url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/shadow_db_0?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/shadow_db_0?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -118,7 +118,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db_1: - url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/shadow_db_1?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/shadow_db_1?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -127,7 +127,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db_2: - url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/shadow_db_2?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/shadow_db_2?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -136,7 +136,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db_3: - url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/shadow_db_3?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/shadow_db_3?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -145,7 +145,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db_4: - url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/shadow_db_4?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/shadow_db_4?useSSL=false&characterEncoding=utf-8 
username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -154,7 +154,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db_5: - url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/shadow_db_5?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/shadow_db_5?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -163,7 +163,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db_6: - url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/shadow_db_6?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/shadow_db_6?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -172,7 +172,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db_7: - url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/shadow_db_7?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/shadow_db_7?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -181,7 +181,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db_8: - url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/shadow_db_8?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/shadow_db_8?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -190,7 +190,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db_9: - url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/shadow_db_9?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_and_shadow.host:3306/shadow_db_9?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git 
a/test/e2e/sql/src/test/resources/env/scenario/sharding_encrypt_shadow/proxy/conf/mysql/config-sharding-encrypt-shadow.yaml b/test/e2e/sql/src/test/resources/env/scenario/sharding_encrypt_shadow/proxy/conf/mysql/config-sharding-encrypt-shadow.yaml index 5cc1692efe212..4b4cbe9c34d1d 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/sharding_encrypt_shadow/proxy/conf/mysql/config-sharding-encrypt-shadow.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/sharding_encrypt_shadow/proxy/conf/mysql/config-sharding-encrypt-shadow.yaml @@ -19,7 +19,7 @@ databaseName: sharding_encrypt_shadow dataSources: db_0: - url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/db_0?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/db_0?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -28,7 +28,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 db_1: - url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/db_1?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/db_1?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -37,7 +37,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 db_2: - url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/db_2?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/db_2?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -46,7 +46,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 db_3: - url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/db_3?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/db_3?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 
connectionTimeoutMilliseconds: 30000 @@ -55,7 +55,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 db_4: - url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/db_4?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/db_4?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -64,7 +64,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 db_5: - url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/db_5?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/db_5?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -73,7 +73,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 db_6: - url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/db_6?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/db_6?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -82,7 +82,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 db_7: - url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/db_7?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/db_7?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -91,7 +91,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 db_8: - url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/db_8?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/db_8?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -100,7 +100,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 db_9: - url: 
jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/db_9?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/db_9?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -109,7 +109,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db_0: - url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/shadow_db_0?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/shadow_db_0?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -118,7 +118,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db_1: - url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/shadow_db_1?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/shadow_db_1?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -127,7 +127,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db_2: - url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/shadow_db_2?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/shadow_db_2?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -136,7 +136,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db_3: - url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/shadow_db_3?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/shadow_db_3?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -145,7 +145,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db_4: - url: 
jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/shadow_db_4?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/shadow_db_4?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -154,7 +154,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db_5: - url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/shadow_db_5?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/shadow_db_5?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -163,7 +163,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db_6: - url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/shadow_db_6?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/shadow_db_6?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -172,7 +172,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db_7: - url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/shadow_db_7?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/shadow_db_7?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -181,7 +181,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db_8: - url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/shadow_db_8?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/shadow_db_8?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -190,7 +190,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 shadow_db_9: - url: 
jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/shadow_db_9?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.sharding_encrypt_shadow.host:3306/shadow_db_9?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/e2e/sql/src/test/resources/env/scenario/tbl/proxy/conf/mysql/config-tbl.yaml b/test/e2e/sql/src/test/resources/env/scenario/tbl/proxy/conf/mysql/config-tbl.yaml index 748285f54c65e..6086abd266486 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/tbl/proxy/conf/mysql/config-tbl.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/tbl/proxy/conf/mysql/config-tbl.yaml @@ -19,7 +19,7 @@ databaseName: tbl dataSources: tbl: - url: jdbc:mysql://mysql.tbl.host:3306/tbl?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.tbl.host:3306/tbl?useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/it/parser/src/main/resources/case/dal/explain.xml b/test/it/parser/src/main/resources/case/dal/explain.xml index 656619f37b7d4..d8c4eee4b635b 100644 --- a/test/it/parser/src/main/resources/case/dal/explain.xml +++ b/test/it/parser/src/main/resources/case/dal/explain.xml @@ -24,7 +24,7 @@ - + @@ -40,7 +40,7 @@ - +
@@ -67,7 +67,7 @@ - +
@@ -87,7 +87,7 @@ - + - +
@@ -137,7 +137,7 @@ - +
@@ -154,7 +154,7 @@ - +
@@ -192,6 +192,7 @@ + + +
+
@@ -273,6 +277,7 @@ +
@@ -288,6 +293,7 @@ +
@@ -303,32 +309,40 @@ + + + + + + + + @@ -353,6 +367,7 @@ + @@ -426,6 +441,7 @@ + + + + +
@@ -549,6 +569,7 @@ +
@@ -564,6 +585,7 @@ +
@@ -579,6 +601,7 @@ +
@@ -594,6 +617,7 @@ +
@@ -620,6 +644,7 @@ +
@@ -635,6 +660,7 @@ +
@@ -650,6 +676,7 @@ +
@@ -665,6 +692,7 @@ +
@@ -680,6 +708,7 @@ +
@@ -726,6 +755,7 @@ +
@@ -746,6 +776,7 @@ +
@@ -766,6 +797,7 @@ +
@@ -786,4 +818,184 @@ + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + + + + + + + * + + + + + + + + + + + + + + + + = + + + +
+
+ + + + diff --git a/test/it/parser/src/main/resources/case/ddl/create-materialized-view-log.xml b/test/it/parser/src/main/resources/case/ddl/create-materialized-view-log.xml index 68c24f03f1dd3..427c373a0818f 100644 --- a/test/it/parser/src/main/resources/case/ddl/create-materialized-view-log.xml +++ b/test/it/parser/src/main/resources/case/ddl/create-materialized-view-log.xml @@ -26,5 +26,5 @@ - + diff --git a/test/it/parser/src/main/resources/case/ddl/create-materialized-view.xml b/test/it/parser/src/main/resources/case/ddl/create-materialized-view.xml index 1224d0d80eb4e..9c0b1d8443336 100644 --- a/test/it/parser/src/main/resources/case/ddl/create-materialized-view.xml +++ b/test/it/parser/src/main/resources/case/ddl/create-materialized-view.xml @@ -25,4 +25,12 @@ + + + + + + + + diff --git a/test/it/parser/src/main/resources/case/ddl/create-view.xml b/test/it/parser/src/main/resources/case/ddl/create-view.xml index b02db9f89450f..dd05e5686e3c2 100644 --- a/test/it/parser/src/main/resources/case/ddl/create-view.xml +++ b/test/it/parser/src/main/resources/case/ddl/create-view.xml @@ -273,6 +273,460 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/test/it/parser/src/main/resources/case/dml/select.xml b/test/it/parser/src/main/resources/case/dml/select.xml index 8a497f44e3cbc..fa588d4f47a62 100644 --- a/test/it/parser/src/main/resources/case/dml/select.xml +++ b/test/it/parser/src/main/resources/case/dml/select.xml @@ -7174,4 +7174,31 @@ + + diff --git a/test/it/parser/src/main/resources/sql/supported/dal/explain.xml b/test/it/parser/src/main/resources/sql/supported/dal/explain.xml index 9176fb3fc0d4f..e8cf5fdf6759f 100644 --- a/test/it/parser/src/main/resources/sql/supported/dal/explain.xml +++ b/test/it/parser/src/main/resources/sql/supported/dal/explain.xml @@ -65,4 +65,10 
@@ + + + + + diff --git a/test/it/parser/src/main/resources/sql/supported/ddl/create-materialized-view-log.xml b/test/it/parser/src/main/resources/sql/supported/ddl/create-materialized-view-log.xml index 2415b6236d22b..7161a04d5e75b 100644 --- a/test/it/parser/src/main/resources/sql/supported/ddl/create-materialized-view-log.xml +++ b/test/it/parser/src/main/resources/sql/supported/ddl/create-materialized-view-log.xml @@ -39,4 +39,9 @@ + diff --git a/test/it/parser/src/main/resources/sql/supported/ddl/create-materialized-view.xml b/test/it/parser/src/main/resources/sql/supported/ddl/create-materialized-view.xml index 2febdf2bb0162..c010002ed91a1 100644 --- a/test/it/parser/src/main/resources/sql/supported/ddl/create-materialized-view.xml +++ b/test/it/parser/src/main/resources/sql/supported/ddl/create-materialized-view.xml @@ -43,4 +43,83 @@ FROM SH.SALES, SH.CUSTOMERS WHERE SH.CUSTOMERS.CUST_ID = SH.SALES.CUST_ID GROUP BY SH.SALES.PROD_ID, SH.CUSTOMERS.CUST_ID;" db-types="Oracle" /> + + + + + + + + diff --git a/test/it/parser/src/main/resources/sql/supported/ddl/create-view.xml b/test/it/parser/src/main/resources/sql/supported/ddl/create-view.xml index c9c08280ff9f0..b48261a6313a8 100644 --- a/test/it/parser/src/main/resources/sql/supported/ddl/create-view.xml +++ b/test/it/parser/src/main/resources/sql/supported/ddl/create-view.xml @@ -27,12 +27,33 @@ - + + + + + + + + + + + + + - + + + + + + + + + + diff --git a/test/it/parser/src/main/resources/sql/supported/dml/select.xml b/test/it/parser/src/main/resources/sql/supported/dml/select.xml index 00db43c5587ca..cff56d26a3f10 100644 --- a/test/it/parser/src/main/resources/sql/supported/dml/select.xml +++ b/test/it/parser/src/main/resources/sql/supported/dml/select.xml @@ -217,4 +217,5 @@ + diff --git a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/consistencycheck/api/impl/ConsistencyCheckJobAPITest.java 
b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/consistencycheck/api/impl/ConsistencyCheckJobAPITest.java index 169f76ab6d52d..fe9ea2db83c66 100644 --- a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/consistencycheck/api/impl/ConsistencyCheckJobAPITest.java +++ b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/consistencycheck/api/impl/ConsistencyCheckJobAPITest.java @@ -18,10 +18,13 @@ package org.apache.shardingsphere.test.it.data.pipeline.scenario.consistencycheck.api.impl; import org.apache.shardingsphere.data.pipeline.common.job.JobStatus; +import org.apache.shardingsphere.data.pipeline.common.job.progress.InventoryIncrementalJobItemProgress; import org.apache.shardingsphere.data.pipeline.common.registrycenter.repository.GovernanceRepositoryAPI; import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineAPIFactory; +import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobItemManager; +import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobManager; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.ConsistencyCheckJobId; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.api.impl.ConsistencyCheckJobAPI; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.api.pojo.CreateConsistencyCheckJobParameter; @@ -49,7 +52,9 @@ class ConsistencyCheckJobAPITest { - private final ConsistencyCheckJobAPI checkJobAPI = new ConsistencyCheckJobAPI(); + private final ConsistencyCheckJobAPI jobAPI = new ConsistencyCheckJobAPI(); + + private final PipelineJobItemManager jobItemManager = new PipelineJobItemManager<>(jobAPI.getYamlJobItemProgressSwapper()); private 
final YamlMigrationJobConfigurationSwapper jobConfigSwapper = new YamlMigrationJobConfigurationSwapper(); @@ -62,9 +67,9 @@ public static void beforeClass() { void assertCreateJobConfig() { MigrationJobConfiguration parentJobConfig = jobConfigSwapper.swapToObject(JobConfigurationBuilder.createYamlMigrationJobConfiguration()); String parentJobId = parentJobConfig.getJobId(); - String checkJobId = checkJobAPI.createJobAndStart(new CreateConsistencyCheckJobParameter(parentJobId, null, null, + String checkJobId = jobAPI.createJobAndStart(new CreateConsistencyCheckJobParameter(parentJobId, null, null, parentJobConfig.getSourceDatabaseType(), parentJobConfig.getTargetDatabaseType())); - ConsistencyCheckJobConfiguration checkJobConfig = checkJobAPI.getJobConfiguration(PipelineJobIdUtils.getElasticJobConfigurationPOJO(checkJobId)); + ConsistencyCheckJobConfiguration checkJobConfig = new PipelineJobManager(jobAPI).getJobConfiguration(checkJobId); int expectedSequence = ConsistencyCheckSequence.MIN_SEQUENCE; String expectCheckJobId = new ConsistencyCheckJobId(PipelineJobIdUtils.parseContextKey(parentJobId), parentJobId, expectedSequence).marshal(); assertThat(checkJobConfig.getJobId(), is(expectCheckJobId)); @@ -80,11 +85,11 @@ void assertDropByParentJobId() { GovernanceRepositoryAPI repositoryAPI = PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineContextUtils.getContextKey()); int expectedSequence = 1; for (int i = 0; i < 3; i++) { - String checkJobId = checkJobAPI.createJobAndStart(new CreateConsistencyCheckJobParameter(parentJobId, null, null, + String checkJobId = jobAPI.createJobAndStart(new CreateConsistencyCheckJobParameter(parentJobId, null, null, parentJobConfig.getSourceDatabaseType(), parentJobConfig.getTargetDatabaseType())); ConsistencyCheckJobItemContext checkJobItemContext = new ConsistencyCheckJobItemContext( new ConsistencyCheckJobConfiguration(checkJobId, parentJobId, null, null, TypedSPILoader.getService(DatabaseType.class, "H2")), 0, 
JobStatus.FINISHED, null); - checkJobAPI.persistJobItemProgress(checkJobItemContext); + jobItemManager.persistProgress(checkJobItemContext); Map dataConsistencyCheckResult = Collections.singletonMap("t_order", new TableDataConsistencyCheckResult(true)); repositoryAPI.persistCheckJobResult(parentJobId, checkJobId, dataConsistencyCheckResult); Optional latestCheckJobId = repositoryAPI.getLatestCheckJobId(parentJobId); @@ -93,12 +98,12 @@ void assertDropByParentJobId() { } expectedSequence = 2; for (int i = 0; i < 2; i++) { - checkJobAPI.dropByParentJobId(parentJobId); + jobAPI.dropByParentJobId(parentJobId); Optional latestCheckJobId = repositoryAPI.getLatestCheckJobId(parentJobId); assertTrue(latestCheckJobId.isPresent()); assertThat(ConsistencyCheckJobId.parseSequence(latestCheckJobId.get()), is(expectedSequence--)); } - checkJobAPI.dropByParentJobId(parentJobId); + jobAPI.dropByParentJobId(parentJobId); Optional latestCheckJobId = repositoryAPI.getLatestCheckJobId(parentJobId); assertFalse(latestCheckJobId.isPresent()); } diff --git a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/api/impl/MigrationJobAPITest.java b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/api/impl/MigrationJobAPITest.java index bbd98ed6fbd4d..84a4e55572e71 100644 --- a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/api/impl/MigrationJobAPITest.java +++ b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/api/impl/MigrationJobAPITest.java @@ -32,7 +32,9 @@ import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; import org.apache.shardingsphere.data.pipeline.core.exception.param.PipelineInvalidParameterException; import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils; +import 
org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobManager; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineAPIFactory; +import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobItemManager; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobManager; import org.apache.shardingsphere.data.pipeline.core.metadata.PipelineDataSourcePersistService; import org.apache.shardingsphere.data.pipeline.scenario.migration.api.impl.MigrationJobAPI; @@ -91,6 +93,10 @@ class MigrationJobAPITest { private static PipelineJobManager jobManager; + private static InventoryIncrementalJobManager inventoryIncrementalJobManager; + + private static PipelineJobItemManager jobItemManager; + private static DatabaseType databaseType; @BeforeAll @@ -98,6 +104,8 @@ static void beforeClass() { PipelineContextUtils.mockModeConfigAndContextManager(); jobAPI = new MigrationJobAPI(); jobManager = new PipelineJobManager(jobAPI); + inventoryIncrementalJobManager = new InventoryIncrementalJobManager(jobAPI); + jobItemManager = new PipelineJobItemManager<>(jobAPI.getYamlJobItemProgressSwapper()); String jdbcUrl = "jdbc:h2:mem:test_ds_0;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=MySQL"; databaseType = DatabaseTypeFactory.get(jdbcUrl); Map props = new HashMap<>(); @@ -142,7 +150,7 @@ void assertStartOrStopById() { void assertRollback() throws SQLException { Optional jobId = jobManager.start(JobConfigurationBuilder.createJobConfiguration()); assertTrue(jobId.isPresent()); - MigrationJobConfiguration jobConfig = jobAPI.getJobConfiguration(PipelineJobIdUtils.getElasticJobConfigurationPOJO(jobId.get())); + MigrationJobConfiguration jobConfig = jobManager.getJobConfiguration(jobId.get()); initTableData(jobConfig); PipelineDistributedBarrier mockBarrier = mock(PipelineDistributedBarrier.class); when(PipelineDistributedBarrier.getInstance(any())).thenReturn(mockBarrier); @@ -154,7 +162,7 @@ void assertRollback() 
throws SQLException { void assertCommit() { Optional jobId = jobManager.start(JobConfigurationBuilder.createJobConfiguration()); assertTrue(jobId.isPresent()); - MigrationJobConfiguration jobConfig = jobAPI.getJobConfiguration(PipelineJobIdUtils.getElasticJobConfigurationPOJO(jobId.get())); + MigrationJobConfiguration jobConfig = jobManager.getJobConfiguration(jobId.get()); initTableData(jobConfig); PipelineDistributedBarrier mockBarrier = mock(PipelineDistributedBarrier.class); when(PipelineDistributedBarrier.getInstance(any())).thenReturn(mockBarrier); @@ -167,7 +175,7 @@ void assertGetProgress() { MigrationJobConfiguration jobConfig = JobConfigurationBuilder.createJobConfiguration(); Optional jobId = jobManager.start(jobConfig); assertTrue(jobId.isPresent()); - Map jobProgressMap = jobAPI.getJobProgress(jobConfig); + Map jobProgressMap = inventoryIncrementalJobManager.getJobProgress(jobConfig); assertThat(jobProgressMap.size(), is(1)); } @@ -187,7 +195,7 @@ void assertDataConsistencyCheck() { @Test void assertAggregateEmptyDataConsistencyCheckResults() { - assertThrows(IllegalArgumentException.class, () -> jobAPI.aggregateDataConsistencyCheckResults("foo_job", Collections.emptyMap())); + assertThrows(IllegalArgumentException.class, () -> inventoryIncrementalJobManager.aggregateDataConsistencyCheckResults("foo_job", Collections.emptyMap())); } @Test @@ -195,7 +203,7 @@ void assertAggregateDifferentDataConsistencyCheckResults() { Map checkResults = new LinkedHashMap<>(2, 1F); checkResults.put("foo_tbl", new TableDataConsistencyCheckResult(true)); checkResults.put("bar_tbl", new TableDataConsistencyCheckResult(false)); - assertFalse(jobAPI.aggregateDataConsistencyCheckResults("foo_job", checkResults)); + assertFalse(inventoryIncrementalJobManager.aggregateDataConsistencyCheckResults("foo_job", checkResults)); } @Test @@ -203,7 +211,7 @@ void assertAggregateSameDataConsistencyCheckResults() { Map checkResults = new LinkedHashMap<>(2, 1F); checkResults.put("foo_tbl", 
new TableDataConsistencyCheckResult(true)); checkResults.put("bar_tbl", new TableDataConsistencyCheckResult(true)); - assertTrue(jobAPI.aggregateDataConsistencyCheckResults("foo_job", checkResults)); + assertTrue(inventoryIncrementalJobManager.aggregateDataConsistencyCheckResults("foo_job", checkResults)); } @Test @@ -212,9 +220,9 @@ void assertSwitchClusterConfigurationSucceed() { Optional jobId = jobManager.start(jobConfig); assertTrue(jobId.isPresent()); MigrationJobItemContext jobItemContext = PipelineContextUtils.mockMigrationJobItemContext(jobConfig); - jobAPI.persistJobItemProgress(jobItemContext); - jobAPI.updateJobItemStatus(jobId.get(), jobItemContext.getShardingItem(), JobStatus.EXECUTE_INVENTORY_TASK); - Map progress = jobAPI.getJobProgress(jobConfig); + jobItemManager.persistProgress(jobItemContext); + jobItemManager.updateStatus(jobId.get(), jobItemContext.getShardingItem(), JobStatus.EXECUTE_INVENTORY_TASK); + Map progress = inventoryIncrementalJobManager.getJobProgress(jobConfig); for (Entry entry : progress.entrySet()) { assertThat(entry.getValue().getStatus(), is(JobStatus.EXECUTE_INVENTORY_TASK)); } @@ -245,9 +253,9 @@ private void initTableData(final DataSource pipelineDataSource) throws SQLExcept void assertRenewJobStatus() { final MigrationJobConfiguration jobConfig = JobConfigurationBuilder.createJobConfiguration(); MigrationJobItemContext jobItemContext = PipelineContextUtils.mockMigrationJobItemContext(jobConfig); - jobAPI.persistJobItemProgress(jobItemContext); - jobAPI.updateJobItemStatus(jobConfig.getJobId(), 0, JobStatus.FINISHED); - Optional actual = jobAPI.getJobItemProgress(jobItemContext.getJobId(), jobItemContext.getShardingItem()); + jobItemManager.persistProgress(jobItemContext); + jobItemManager.updateStatus(jobConfig.getJobId(), 0, JobStatus.FINISHED); + Optional actual = jobItemManager.getProgress(jobItemContext.getJobId(), jobItemContext.getShardingItem()); assertTrue(actual.isPresent()); assertThat(actual.get().getStatus(), 
is(JobStatus.FINISHED)); } @@ -277,7 +285,7 @@ void assertCreateJobConfig() throws SQLException { initIntPrimaryEnvironment(); SourceTargetEntry sourceTargetEntry = new SourceTargetEntry("logic_db", new DataNode("ds_0", "t_order"), "t_order"); String jobId = jobAPI.createJobAndStart(PipelineContextUtils.getContextKey(), new MigrateTableStatement(Collections.singletonList(sourceTargetEntry), "logic_db")); - MigrationJobConfiguration actual = jobAPI.getJobConfiguration(PipelineJobIdUtils.getElasticJobConfigurationPOJO(jobId)); + MigrationJobConfiguration actual = jobManager.getJobConfiguration(jobId); assertThat(actual.getTargetDatabaseName(), is("logic_db")); List dataNodeLines = actual.getJobShardingDataNodes(); assertThat(dataNodeLines.size(), is(1)); @@ -318,7 +326,7 @@ void assertGetJobItemInfosAtBegin() { yamlJobItemProgress.setStatus(JobStatus.RUNNING.name()); yamlJobItemProgress.setSourceDatabaseType("MySQL"); PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineContextUtils.getContextKey()).persistJobItemProgress(jobId.get(), 0, YamlEngine.marshal(yamlJobItemProgress)); - List jobItemInfos = jobAPI.getJobItemInfos(jobId.get()); + List jobItemInfos = inventoryIncrementalJobManager.getJobItemInfos(jobId.get()); assertThat(jobItemInfos.size(), is(1)); InventoryIncrementalJobItemInfo jobItemInfo = jobItemInfos.get(0); assertThat(jobItemInfo.getJobItemProgress().getStatus(), is(JobStatus.RUNNING)); @@ -335,7 +343,7 @@ void assertGetJobItemInfosAtIncrementTask() { yamlJobItemProgress.setProcessedRecordsCount(100); yamlJobItemProgress.setInventoryRecordsCount(50); PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineContextUtils.getContextKey()).persistJobItemProgress(jobId.get(), 0, YamlEngine.marshal(yamlJobItemProgress)); - List jobItemInfos = jobAPI.getJobItemInfos(jobId.get()); + List jobItemInfos = inventoryIncrementalJobManager.getJobItemInfos(jobId.get()); InventoryIncrementalJobItemInfo jobItemInfo = jobItemInfos.get(0); 
assertThat(jobItemInfo.getJobItemProgress().getStatus(), is(JobStatus.EXECUTE_INCREMENTAL_TASK)); assertThat(jobItemInfo.getInventoryFinishedPercentage(), is(100)); diff --git a/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/fixture/keygen/RewriteKeyGenerateAlgorithmFixture.java b/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/fixture/keygen/RewriteKeyGenerateAlgorithmFixture.java index de7c640080276..40820915117e5 100644 --- a/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/fixture/keygen/RewriteKeyGenerateAlgorithmFixture.java +++ b/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/fixture/keygen/RewriteKeyGenerateAlgorithmFixture.java @@ -17,7 +17,7 @@ package org.apache.shardingsphere.test.it.rewrite.fixture.keygen; -import org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm; +import org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm; public final class RewriteKeyGenerateAlgorithmFixture implements KeyGenerateAlgorithm { diff --git a/test/it/rewriter/src/test/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm b/test/it/rewriter/src/test/resources/META-INF/services/org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm similarity index 100% rename from test/it/rewriter/src/test/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm rename to test/it/rewriter/src/test/resources/META-INF/services/org.apache.shardingsphere.keygen.core.algorithm.KeyGenerateAlgorithm