Fix sonar issue (#29204)
* Remove useless codes

* Remove useless codes

* Fix sonar issue
terrymanu authored Nov 25, 2023
1 parent 643b486 commit 139f6db
Showing 2 changed files with 4 additions and 55 deletions.
First changed file: org/apache/shardingsphere/infra/util/datetime/StandardDateTimeFormatter.java
@@ -17,11 +17,15 @@

package org.apache.shardingsphere.infra.util.datetime;

import lombok.AccessLevel;
import lombok.NoArgsConstructor;

import java.time.format.DateTimeFormatter;

/**
 * Standard date time formatter.
 */
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public final class StandardDateTimeFormatter {

    private static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
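
For context, @NoArgsConstructor(access = AccessLevel.PRIVATE) is Lombok shorthand for a private no-args constructor, the standard remedy for Sonar's utility-class rule (presumably java:S1118, "Utility classes should not have public constructors"): without it, the compiler adds an implicit public constructor to this final class. A minimal plain-Java sketch of what the annotation expands to; the accessor method is hypothetical, since the rest of the class is not shown in this diff:

package org.apache.shardingsphere.infra.util.datetime;

import java.time.format.DateTimeFormatter;

/**
 * Plain-Java equivalent of the Lombok-annotated class above.
 */
public final class StandardDateTimeFormatter {

    private static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    // What @NoArgsConstructor(access = AccessLevel.PRIVATE) generates:
    // a private constructor so the utility class cannot be instantiated.
    private StandardDateTimeFormatter() {
    }

    // Hypothetical accessor for illustration; the real class's remaining members are not shown in this diff.
    public static DateTimeFormatter getDateTimeFormatter() {
        return DATE_TIME_FORMATTER;
    }
}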
Second changed file (data pipeline migration module):
@@ -33,16 +33,13 @@
import org.apache.shardingsphere.data.pipeline.common.datanode.JobDataNodeEntry;
import org.apache.shardingsphere.data.pipeline.common.datanode.JobDataNodeLine;
import org.apache.shardingsphere.data.pipeline.common.datanode.JobDataNodeLineConvertUtils;
import org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceFactory;
import org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceWrapper;
import org.apache.shardingsphere.data.pipeline.common.datasource.yaml.YamlPipelineDataSourceConfiguration;
import org.apache.shardingsphere.data.pipeline.common.metadata.CaseInsensitiveIdentifier;
import org.apache.shardingsphere.data.pipeline.common.metadata.CaseInsensitiveQualifiedTable;
import org.apache.shardingsphere.data.pipeline.common.metadata.loader.PipelineSchemaUtils;
import org.apache.shardingsphere.data.pipeline.common.pojo.PipelineJobInfo;
import org.apache.shardingsphere.data.pipeline.common.pojo.PipelineJobMetaData;
import org.apache.shardingsphere.data.pipeline.common.spi.algorithm.JobRateLimitAlgorithm;
import org.apache.shardingsphere.data.pipeline.common.sqlbuilder.PipelineCommonSQLBuilder;
import org.apache.shardingsphere.data.pipeline.common.util.ShardingColumnsExtractor;
import org.apache.shardingsphere.data.pipeline.core.consistencycheck.ConsistencyCheckJobItemProgressContext;
import org.apache.shardingsphere.data.pipeline.core.consistencycheck.PipelineDataConsistencyChecker;
@@ -54,7 +51,6 @@
import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.mapper.TableAndSchemaNameMapper;
import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils;
import org.apache.shardingsphere.data.pipeline.core.job.option.TransmissionJobOption;
import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineAPIFactory;
import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobConfigurationManager;
import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobManager;
import org.apache.shardingsphere.data.pipeline.core.job.service.TransmissionJobManager;
@@ -87,11 +83,7 @@
import org.apache.shardingsphere.infra.yaml.config.swapper.rule.YamlRuleConfigurationSwapperEngine;
import org.apache.shardingsphere.migration.distsql.statement.MigrateTableStatement;
import org.apache.shardingsphere.migration.distsql.statement.pojo.SourceTargetEntry;
import org.apache.shardingsphere.mode.manager.ContextManager;

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
@@ -298,40 +290,6 @@ public Optional<String> getToBeStoppedPreviousJobType() {
        return Optional.of("CONSISTENCY_CHECK");
    }

    private void dropCheckJobs(final String jobId) {
        Collection<String> checkJobIds = PipelineAPIFactory.getPipelineGovernanceFacade(PipelineJobIdUtils.parseContextKey(jobId)).getJobFacade().getCheck().listCheckJobIds(jobId);
        if (checkJobIds.isEmpty()) {
            return;
        }
        for (String each : checkJobIds) {
            try {
                new PipelineJobManager(this).drop(each);
                // CHECKSTYLE:OFF
            } catch (final RuntimeException ex) {
                // CHECKSTYLE:ON
                log.info("drop check job failed, check job id: {}, error: {}", each, ex.getMessage());
            }
        }
    }

    private void cleanTempTableOnRollback(final String jobId) throws SQLException {
        MigrationJobConfiguration jobConfig = new PipelineJobConfigurationManager(this).getJobConfiguration(jobId);
        PipelineCommonSQLBuilder pipelineSQLBuilder = new PipelineCommonSQLBuilder(jobConfig.getTargetDatabaseType());
        TableAndSchemaNameMapper mapping = new TableAndSchemaNameMapper(jobConfig.getTargetTableSchemaMap());
        try (
                PipelineDataSourceWrapper dataSource = PipelineDataSourceFactory.newInstance(jobConfig.getTarget());
                Connection connection = dataSource.getConnection()) {
            for (String each : jobConfig.getTargetTableNames()) {
                String targetSchemaName = mapping.getSchemaName(each);
                String sql = pipelineSQLBuilder.buildDropSQL(targetSchemaName, each);
                log.info("cleanTempTableOnRollback, targetSchemaName={}, targetTableName={}, sql={}", targetSchemaName, each, sql);
                try (Statement statement = connection.createStatement()) {
                    statement.execute(sql);
                }
            }
        }
    }

    /**
     * Add migration source resources.
     *
@@ -410,19 +368,6 @@ private String getStandardProperty(final Map<String, Object> standardProps, fina
        return "";
    }

    /**
     * Refresh table metadata.
     *
     * @param jobId job id
     * @param databaseName database name
     */
    public void refreshTableMetadata(final String jobId, final String databaseName) {
        // TODO use origin database name for now; wait for reloadDatabaseMetaData to fix the case-sensitive problem
        ContextManager contextManager = PipelineContextManager.getContext(PipelineJobIdUtils.parseContextKey(jobId)).getContextManager();
        ShardingSphereDatabase database = contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName);
        contextManager.reloadDatabaseMetaData(database.getName());
    }

    @Override
    public Class<MigrationJob> getJobClass() {
        return MigrationJob.class;