diff --git a/kernel/data-pipeline/dialect/opengauss/src/main/java/org/apache/shardingsphere/data/pipeline/opengauss/sqlbuilder/OpenGaussPipelineSQLBuilder.java b/kernel/data-pipeline/dialect/opengauss/src/main/java/org/apache/shardingsphere/data/pipeline/opengauss/sqlbuilder/OpenGaussPipelineSQLBuilder.java
index 0ee8e65ebf094..7e9f9eb5dd31d 100644
--- a/kernel/data-pipeline/dialect/opengauss/src/main/java/org/apache/shardingsphere/data/pipeline/opengauss/sqlbuilder/OpenGaussPipelineSQLBuilder.java
+++ b/kernel/data-pipeline/dialect/opengauss/src/main/java/org/apache/shardingsphere/data/pipeline/opengauss/sqlbuilder/OpenGaussPipelineSQLBuilder.java
@@ -69,7 +69,7 @@ public Collection<String> buildCreateTableSQLs(final DataSource dataSource, fina
                 Statement statement = connection.createStatement();
                 ResultSet resultSet = statement.executeQuery(String.format("SELECT * FROM pg_get_tabledef('%s.%s')", schemaName, tableName))) {
             if (resultSet.next()) {
-                // TODO use ";" to split is not always correct
+                // TODO Splitting by ";" is not always correct, e.g. when the returned table definition's comments contain ";"
                 return Arrays.asList(resultSet.getString("pg_get_tabledef").split(";"));
             }
         }
diff --git a/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/sqlbuilder/PostgreSQLPipelineSQLBuilder.java b/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/sqlbuilder/PostgreSQLPipelineSQLBuilder.java
index 2991ee4377dbd..7192526c0b676 100644
--- a/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/sqlbuilder/PostgreSQLPipelineSQLBuilder.java
+++ b/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/sqlbuilder/PostgreSQLPipelineSQLBuilder.java
@@ -72,7 +72,12 @@ public Optional<String> buildEstimatedCountSQL(final String qualifiedTableName)
         return Optional.of(String.format("SELECT reltuples::integer FROM pg_class WHERE oid='%s'::regclass::oid;", qualifiedTableName));
     }
 
-    // TODO support partitions etc.
+    // TODO Support partitions etc. If the user migrates a partitioned table, the partition definition is no longer needed after sharding, so it should be removed once partition support is implemented.
+    @Override
+    public Optional<String> buildCRC32SQL(final String qualifiedTableName, final String columnName) {
+        return Optional.of(String.format("SELECT pg_catalog.pg_checksum_table('%s', true)", qualifiedTableName));
+    }
+
     @Override
     public Collection<String> buildCreateTableSQLs(final DataSource dataSource, final String schemaName, final String tableName) throws SQLException {
         try (Connection connection = dataSource.getConnection()) {
@@ -81,7 +86,7 @@ public Collection<String> buildCreateTableSQLs(final DataSource dataSource, fina
             Map<String, Object> materials = loadMaterials(tableName, schemaName, connection, majorVersion, minorVersion);
             String tableSQL = generateCreateTableSQL(majorVersion, minorVersion, materials);
             String indexSQL = generateCreateIndexSQL(connection, majorVersion, minorVersion, materials);
-            // TODO use ";" to split is not always correct
+            // TODO Splitting by ";" is not always correct, e.g. when the returned table definition's comments contain ";"
             return Arrays.asList((tableSQL + System.lineSeparator() + indexSQL).trim().split(";"));
         }
     }
diff --git a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/MigrationJobAPI.java b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/MigrationJobAPI.java
index 1b5daf02ed13d..236381a9cca86 100644
--- a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/MigrationJobAPI.java
+++ b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/MigrationJobAPI.java
@@ -285,7 +285,7 @@ public void commit(final String jobId) {
     }
 
     private void refreshTableMetadata(final String jobId, final String databaseName) {
-        // TODO use origin database name now, wait reloadDatabaseMetaData fix case-sensitive problem
+        // TODO Use the origin database name for now; the metadata refresh scope can be reduced once the reloadDatabaseMetaData case-sensitivity problem is fixed.
         ContextManager contextManager = PipelineContextManager.getContext(PipelineJobIdUtils.parseContextKey(jobId)).getContextManager();
         ShardingSphereDatabase database = contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName);
         contextManager.getMetaDataContextManager().refreshTableMetaData(database);
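For reference, a minimal sketch of how the new buildCRC32SQL implementation could be exercised in a unit test. The test class name, JUnit 5 and Hamcrest usage, and the no-arg construction of PostgreSQLPipelineSQLBuilder are assumptions for illustration and not part of this PR; the expected SQL string comes directly from the format string added above.

import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;

import java.util.Optional;

import org.apache.shardingsphere.data.pipeline.postgresql.sqlbuilder.PostgreSQLPipelineSQLBuilder;
import org.junit.jupiter.api.Test;

// Hypothetical test class (not part of this PR) covering the new buildCRC32SQL method.
class PostgreSQLPipelineSQLBuilderCRC32Test {
    
    private final PostgreSQLPipelineSQLBuilder sqlBuilder = new PostgreSQLPipelineSQLBuilder();
    
    @Test
    void assertBuildCRC32SQL() {
        // The column name argument is accepted by the method signature but not used
        // in the generated SQL (see the implementation in the diff above).
        Optional<String> actual = sqlBuilder.buildCRC32SQL("t_order", "order_id");
        assertThat(actual.isPresent(), is(true));
        assertThat(actual.get(), is("SELECT pg_catalog.pg_checksum_table('t_order', true)"));
    }
}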