Commit: Rename PipelineJobRegistry (#29332)

terrymanu authored Dec 8, 2023
1 parent 61ee704, commit bcc4a28

Showing 10 changed files with 133 additions and 105 deletions.
@@ -29,10 +29,10 @@
 import java.util.concurrent.ConcurrentHashMap;
 
 /**
- * Pipeline job center.
+ * Pipeline job registry.
  */
 @NoArgsConstructor(access = AccessLevel.PRIVATE)
-public final class PipelineJobCenter {
+public final class PipelineJobRegistry {
 
     private static final Map<String, PipelineJob> JOBS = new ConcurrentHashMap<>();
 
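For orientation, here is a minimal sketch of what the renamed registry plausibly looks like, reconstructed from the JOBS field above and from the call sites and tests in the rest of this commit (add, isExisting, get, stop, getItemContext, getShardingItems). It illustrates the contract the callers below rely on; it is not the verbatim ShardingSphere source.

package org.apache.shardingsphere.data.pipeline.core.job;

import org.apache.shardingsphere.data.pipeline.core.context.PipelineJobItemContext;
import org.apache.shardingsphere.data.pipeline.core.task.runner.PipelineTasksRunner;

import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;

public final class PipelineJobRegistry {
    
    private static final Map<String, PipelineJob> JOBS = new ConcurrentHashMap<>();
    
    public static void add(final String jobId, final PipelineJob job) {
        JOBS.put(jobId, job);
    }
    
    public static boolean isExisting(final String jobId) {
        return JOBS.containsKey(jobId);
    }
    
    public static PipelineJob get(final String jobId) {
        return JOBS.get(jobId);
    }
    
    // stop() both halts the job and deregisters it, matching assertStop() in the new test below.
    public static void stop(final String jobId) {
        PipelineJob job = JOBS.remove(jobId);
        if (null != job) {
            job.stop();
        }
    }
    
    // Absent jobs yield Optional.empty() / an empty list, matching the "NotExisted" tests below.
    public static Optional<PipelineJobItemContext> getItemContext(final String jobId, final int shardingItem) {
        PipelineJob job = JOBS.get(jobId);
        return null == job ? Optional.empty() : job.getTasksRunner(shardingItem).map(PipelineTasksRunner::getJobItemContext);
    }
    
    public static Collection<Integer> getShardingItems(final String jobId) {
        PipelineJob job = JOBS.get(jobId);
        return null == job ? Collections.emptyList() : job.getShardingItems();
    }
}

Keeping the jobs in a static ConcurrentHashMap makes lookups lock-free, and because stop(jobId) tolerates unknown job ids, the CDC and migration call sites below can invoke it unconditionally.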
@@ -22,7 +22,7 @@
 import lombok.extern.slf4j.Slf4j;
 import org.apache.shardingsphere.data.pipeline.core.context.PipelineJobItemContext;
 import org.apache.shardingsphere.data.pipeline.core.job.type.PipelineJobType;
-import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobCenter;
+import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobRegistry;
 import org.apache.shardingsphere.data.pipeline.core.job.id.PipelineJobIdUtils;
 import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobItemManager;
 import org.apache.shardingsphere.infra.executor.kernel.thread.ExecutorThreadFactoryBuilder;
@@ -121,7 +121,7 @@ private static synchronized void persist(final String jobId, final int shardingI
                 && !persistContext.getHasNewEvents().get()) {
             return;
         }
-        Optional<PipelineJobItemContext> jobItemContext = PipelineJobCenter.getItemContext(jobId, shardingItem);
+        Optional<PipelineJobItemContext> jobItemContext = PipelineJobRegistry.getItemContext(jobId, shardingItem);
         if (!jobItemContext.isPresent()) {
             return;
         }
@@ -23,7 +23,7 @@
 import org.apache.shardingsphere.data.pipeline.core.metadata.node.config.processor.JobConfigurationChangedProcessor;
 import org.apache.shardingsphere.data.pipeline.core.util.PipelineDistributedBarrier;
 import org.apache.shardingsphere.data.pipeline.core.job.AbstractPipelineJob;
-import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobCenter;
+import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobRegistry;
 import org.apache.shardingsphere.data.pipeline.core.job.id.PipelineJobIdUtils;
 import org.apache.shardingsphere.data.pipeline.core.job.api.PipelineAPIFactory;
 import org.apache.shardingsphere.elasticjob.api.JobConfiguration;
@@ -47,8 +47,8 @@ public void process(final Type eventType, final JobConfiguration jobConfig) {
         }
         String jobId = jobConfig.getJobName();
         if (disabled || deleted) {
-            Collection<Integer> jobItems = PipelineJobCenter.getShardingItems(jobId);
-            PipelineJobCenter.stop(jobId);
+            Collection<Integer> jobItems = PipelineJobRegistry.getShardingItems(jobId);
+            PipelineJobRegistry.stop(jobId);
             if (disabled) {
                 onDisabled(jobConfig, jobItems);
             }
@@ -57,7 +57,7 @@ public void process(final Type eventType, final JobConfiguration jobConfig) {
         switch (eventType) {
             case ADDED:
             case UPDATED:
-                if (PipelineJobCenter.isExisting(jobId)) {
+                if (PipelineJobRegistry.isExisting(jobId)) {
                     log.info("{} added to executing jobs failed since it already exists", jobId);
                 } else {
                     executeJob(jobConfig);
@@ -81,7 +81,7 @@ protected void onDisabled(final JobConfiguration jobConfig, final Collection<Int
     protected void executeJob(final JobConfiguration jobConfig) {
         String jobId = jobConfig.getJobName();
         AbstractPipelineJob job = buildPipelineJob(jobId);
-        PipelineJobCenter.add(jobId, job);
+        PipelineJobRegistry.add(jobId, job);
         OneOffJobBootstrap oneOffJobBootstrap = new OneOffJobBootstrap(PipelineAPIFactory.getRegistryCenter(PipelineJobIdUtils.parseContextKey(jobId)), job, jobConfig);
         job.setJobBootstrap(oneOffJobBootstrap);
         oneOffJobBootstrap.execute();

This file was deleted.

@@ -0,0 +1,108 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.shardingsphere.data.pipeline.core.job;

import org.apache.shardingsphere.data.pipeline.core.context.PipelineJobItemContext;
import org.apache.shardingsphere.data.pipeline.core.task.runner.PipelineTasksRunner;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

import java.util.Arrays;
import java.util.Collections;
import java.util.Optional;

import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

@ExtendWith(MockitoExtension.class)
class PipelineJobRegistryTest {
    
    @Mock
    private PipelineJob job;
    
    @BeforeEach
    void setUp() {
        PipelineJobRegistry.add("foo_job", job);
    }
    
    @AfterEach
    void reset() {
        PipelineJobRegistry.stop("foo_job");
    }
    
    @Test
    void assertAdd() {
        assertFalse(PipelineJobRegistry.isExisting("bar_job"));
        PipelineJobRegistry.add("bar_job", mock(PipelineJob.class));
        assertTrue(PipelineJobRegistry.isExisting("bar_job"));
    }
    
    @Test
    void assertIsExisting() {
        assertTrue(PipelineJobRegistry.isExisting("foo_job"));
    }
    
    @Test
    void assertGet() {
        assertThat(PipelineJobRegistry.get("foo_job"), is(job));
    }
    
    @Test
    void assertStop() {
        PipelineJobRegistry.stop("foo_job");
        verify(job).stop();
        assertFalse(PipelineJobRegistry.isExisting("foo_job"));
    }
    
    @Test
    void assertGetExistedItemContext() {
        PipelineJobItemContext jobItemContext = mock(PipelineJobItemContext.class);
        PipelineTasksRunner tasksRunner = mock(PipelineTasksRunner.class);
        when(tasksRunner.getJobItemContext()).thenReturn(jobItemContext);
        when(job.getTasksRunner(anyInt())).thenReturn(Optional.of(tasksRunner));
        Optional<PipelineJobItemContext> actual = PipelineJobRegistry.getItemContext("foo_job", 1);
        assertTrue(actual.isPresent());
        assertThat(actual.get(), is(jobItemContext));
    }
    
    @Test
    void assertGetNotExistedItemContext() {
        assertThat(PipelineJobRegistry.getItemContext("bar_job", 1), is(Optional.empty()));
    }
    
    @Test
    void assertGetExistedShardingItems() {
        when(job.getShardingItems()).thenReturn(Arrays.asList(1, 2, 3));
        assertThat(PipelineJobRegistry.getShardingItems("foo_job"), is(Arrays.asList(1, 2, 3)));
    }
    
    @Test
    void assertGetNotExistedShardingItems() {
        assertThat(PipelineJobRegistry.getShardingItems("bar_job"), is(Collections.emptyList()));
    }
}
@@ -55,7 +55,7 @@
 import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.IncrementalDumperContext;
 import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.mapper.TableAndSchemaNameMapper;
 import org.apache.shardingsphere.data.pipeline.core.job.AbstractPipelineJob;
-import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobCenter;
+import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobRegistry;
 import org.apache.shardingsphere.data.pipeline.core.job.id.PipelineJobIdUtils;
 import org.apache.shardingsphere.data.pipeline.core.job.api.PipelineAPIFactory;
 import org.apache.shardingsphere.data.pipeline.core.job.api.TransmissionJobAPI;
@@ -187,7 +187,7 @@ private void prepare(final Collection<CDCJobItemContext> jobItemContexts) {
     private void processFailed(final String jobId, final int shardingItem, final Exception ex) {
         log.error("job execution failed, {}-{}", jobId, shardingItem, ex);
         PipelineAPIFactory.getPipelineGovernanceFacade(PipelineJobIdUtils.parseContextKey(jobId)).getJobItemFacade().getErrorMessage().update(jobId, shardingItem, ex);
-        PipelineJobCenter.stop(jobId);
+        PipelineJobRegistry.stop(jobId);
         jobAPI.disable(jobId);
     }

@@ -268,7 +268,7 @@ public void onFailure(final Throwable throwable) {
                 CDCSocketSink cdcSink = (CDCSocketSink) jobItemContext.getSink();
                 cdcSink.getChannel().writeAndFlush(CDCResponseUtils.failed("", "", throwable.getMessage()));
             }
-            PipelineJobCenter.stop(jobId);
+            PipelineJobRegistry.stop(jobId);
             jobAPI.disable(jobId);
         }
     }
@@ -48,7 +48,7 @@
 import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.DumperCommonContext;
 import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.IncrementalDumperContext;
 import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.mapper.TableAndSchemaNameMapper;
-import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobCenter;
+import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobRegistry;
 import org.apache.shardingsphere.data.pipeline.core.job.id.PipelineJobIdUtils;
 import org.apache.shardingsphere.data.pipeline.core.job.api.PipelineAPIFactory;
 import org.apache.shardingsphere.data.pipeline.core.job.api.TransmissionJobAPI;
@@ -213,7 +213,7 @@ private static TransmissionJobItemProgress getTransmissionJobItemProgress(final
      */
     public void start(final String jobId, final PipelineSink sink) {
         CDCJob job = new CDCJob(jobId, sink);
-        PipelineJobCenter.add(jobId, job);
+        PipelineJobRegistry.add(jobId, job);
         enable(jobId);
         JobConfigurationPOJO jobConfigPOJO = PipelineJobIdUtils.getElasticJobConfigurationPOJO(jobId);
         OneOffJobBootstrap oneOffJobBootstrap = new OneOffJobBootstrap(PipelineAPIFactory.getRegistryCenter(PipelineJobIdUtils.parseContextKey(jobId)), job, jobConfigPOJO.toJobConfiguration());
@@ -41,7 +41,7 @@
 import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.IncrementalDumperContext;
 import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.InventoryDumperContext;
 import org.apache.shardingsphere.data.pipeline.core.ingest.position.IngestPosition;
-import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobCenter;
+import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobRegistry;
 import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobItemManager;
 import org.apache.shardingsphere.data.pipeline.core.preparer.InventoryTaskSplitter;
 import org.apache.shardingsphere.data.pipeline.core.preparer.PipelineJobPreparerUtils;
@@ -92,7 +92,7 @@ private void initTasks0(final CDCJobItemContext jobItemContext, final AtomicBool
             jobItemManager.persistProgress(jobItemContext);
         }
         if (jobItemContext.isStopping()) {
-            PipelineJobCenter.stop(jobItemContext.getJobId());
+            PipelineJobRegistry.stop(jobItemContext.getJobId());
             return;
         }
         initIncrementalPosition(jobItemContext);
@@ -47,7 +47,7 @@
 import org.apache.shardingsphere.data.pipeline.core.context.PipelineContextManager;
 import org.apache.shardingsphere.data.pipeline.core.exception.job.PipelineJobNotFoundException;
 import org.apache.shardingsphere.data.pipeline.core.exception.param.PipelineInvalidParameterException;
-import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobCenter;
+import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobRegistry;
 import org.apache.shardingsphere.data.pipeline.core.job.api.TransmissionJobAPI;
 import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobConfigurationManager;
 import org.apache.shardingsphere.infra.database.core.metadata.database.DialectDatabaseMetaData;
@@ -134,8 +134,8 @@ public CDCResponse streamData(final String requestId, final StreamDataRequestBod
     public void startStreaming(final String jobId, final CDCConnectionContext connectionContext, final Channel channel) {
         CDCJobConfiguration cdcJobConfig = jobConfigManager.getJobConfiguration(jobId);
         ShardingSpherePreconditions.checkNotNull(cdcJobConfig, () -> new PipelineJobNotFoundException(jobId));
-        if (PipelineJobCenter.isExisting(jobId)) {
-            PipelineJobCenter.stop(jobId);
+        if (PipelineJobRegistry.isExisting(jobId)) {
+            PipelineJobRegistry.stop(jobId);
         }
         ShardingSphereDatabase database = PipelineContextManager.getProxyContext().getContextManager().getMetaDataContexts().getMetaData().getDatabase(cdcJobConfig.getDatabaseName());
         jobAPI.start(jobId, new CDCSocketSink(channel, database, cdcJobConfig.getSchemaTableNames()));
@@ -153,13 +153,13 @@ public void stopStreaming(final String jobId, final ChannelId channelId) {
             log.warn("job id is null or empty, ignored");
             return;
         }
-        CDCJob job = (CDCJob) PipelineJobCenter.get(jobId);
+        CDCJob job = (CDCJob) PipelineJobRegistry.get(jobId);
         if (null == job) {
             return;
         }
         if (job.getSink().identifierMatched(channelId)) {
             log.info("close CDC job, channel id: {}", channelId);
-            PipelineJobCenter.stop(jobId);
+            PipelineJobRegistry.stop(jobId);
             jobAPI.disable(jobId);
         }
     }
@@ -43,7 +43,7 @@
 import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.Dumper;
 import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.IncrementalDumperContext;
 import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.InventoryDumperContext;
-import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobCenter;
+import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobRegistry;
 import org.apache.shardingsphere.data.pipeline.core.job.id.PipelineJobIdUtils;
 import org.apache.shardingsphere.data.pipeline.core.job.api.PipelineAPIFactory;
 import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobItemManager;
@@ -97,12 +97,12 @@ public void prepare(final MigrationJobItemContext jobItemContext) throws SQLExce
                 () -> new UnsupportedSQLOperationException("Migration inventory dumper only support StandardPipelineDataSourceConfiguration"));
         PipelineJobPreparerUtils.checkSourceDataSource(jobItemContext.getJobConfig().getSourceDatabaseType(), Collections.singleton(jobItemContext.getSourceDataSource()));
         if (jobItemContext.isStopping()) {
-            PipelineJobCenter.stop(jobItemContext.getJobId());
+            PipelineJobRegistry.stop(jobItemContext.getJobId());
             return;
         }
         prepareAndCheckTargetWithLock(jobItemContext);
         if (jobItemContext.isStopping()) {
-            PipelineJobCenter.stop(jobItemContext.getJobId());
+            PipelineJobRegistry.stop(jobItemContext.getJobId());
             return;
         }
         boolean isIncrementalSupported = PipelineJobPreparerUtils.isIncrementalSupported(jobItemContext.getJobConfig().getSourceDatabaseType());
@@ -113,7 +113,7 @@
         if (isIncrementalSupported) {
             initIncrementalTasks(jobItemContext);
             if (jobItemContext.isStopping()) {
-                PipelineJobCenter.stop(jobItemContext.getJobId());
+                PipelineJobRegistry.stop(jobItemContext.getJobId());
                 return;
             }
         }
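One pattern worth calling out in the preparer hunks above: the stop flag is re-checked between expensive preparation phases, and PipelineJobRegistry.stop(...) deregisters the job at the first phase boundary after a stop request. A condensed fragment of the idiom, reusing the names from the MigrationJobPreparer hunks above (not standalone code):

// Cooperative cancellation: each long-running phase is bracketed by a
// stop-flag check, so a concurrent stop request takes effect at the next
// phase boundary instead of interrupting a phase mid-flight.
if (jobItemContext.isStopping()) {
    PipelineJobRegistry.stop(jobItemContext.getJobId());
    return;
}
prepareAndCheckTargetWithLock(jobItemContext);
if (jobItemContext.isStopping()) {
    PipelineJobRegistry.stop(jobItemContext.getJobId());
    return;
}
initIncrementalTasks(jobItemContext);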
