From 89d3dca568aec6e1b19fcb9c7cf63e584947ad64 Mon Sep 17 00:00:00 2001 From: aaryaattrey Date: Wed, 4 Oct 2023 13:47:56 +0530 Subject: [PATCH 1/3] e2e tests: kafka, postgres, mysql --- connectors/kafka/README.md | 2 +- connectors/kafka/producer.py | 2 +- connectors/postgres/README.md | 4 ++-- connectors/postgres/dozer-config.yaml | 2 +- package.json | 4 ++-- test/connectors/initKafka.sh | 20 ++++++++++++++++++ test/connectors/initMySQL.sh | 5 +++++ test/connectors/initPostgres.sh | 7 +++++++ test/connectors/mysql.ts | 30 +++++++++++++++++++++++++++ test/connectors/postgres.ts | 26 +++++++++++++++++++++++ test/connectors/runkafka.ts | 27 ++++++++++++++++++++++++ test/helper.ts | 4 ++-- 12 files changed, 124 insertions(+), 9 deletions(-) create mode 100755 test/connectors/initKafka.sh create mode 100755 test/connectors/initMySQL.sh create mode 100755 test/connectors/initPostgres.sh create mode 100644 test/connectors/mysql.ts create mode 100644 test/connectors/postgres.ts create mode 100644 test/connectors/runkafka.ts diff --git a/connectors/kafka/README.md b/connectors/kafka/README.md index bdcdadc8..3d54dc5d 100644 --- a/connectors/kafka/README.md +++ b/connectors/kafka/README.md @@ -54,7 +54,7 @@ The producer will start generating sample data and publish it to Redpanda. 2. Run the following command to start Dozer: ```bash -dozer -c dozer-config.yaml +dozer run -c dozer-config.yaml ``` Dozer will ingest the data from Redpanda and perform the specified operations based on the configuration. diff --git a/connectors/kafka/producer.py b/connectors/kafka/producer.py index 5eab2516..8d77c46c 100644 --- a/connectors/kafka/producer.py +++ b/connectors/kafka/producer.py @@ -13,7 +13,7 @@ producer = KafkaProducer(bootstrap_servers=kafka_bootstrap_servers) # Generate mock transaction data and send it to the Kafka topic -for index in range(10000000): +for index in range(10): # Generate mock transaction data using the Faker library transaction_data = { 'id': index, diff --git a/connectors/postgres/README.md b/connectors/postgres/README.md index f3b766d7..db8f49de 100644 --- a/connectors/postgres/README.md +++ b/connectors/postgres/README.md @@ -16,13 +16,13 @@ docker-compose up Running Dozer ``` -dozer +dozer run ``` That's all to it. You have APIs instantly available over REST and gRPC. 
``` - dozer + dozer run ____ ___ __________ ____ | _ \ / _ \__ / ____| _ \ diff --git a/connectors/postgres/dozer-config.yaml b/connectors/postgres/dozer-config.yaml index d85b63ac..d7c11209 100644 --- a/connectors/postgres/dozer-config.yaml +++ b/connectors/postgres/dozer-config.yaml @@ -7,7 +7,7 @@ connections: password: postgres host: localhost port: 5433 - database: film + database: pagila sources: - name: actors diff --git a/package.json b/package.json index e0f5d47d..51b9cf83 100644 --- a/package.json +++ b/package.json @@ -20,7 +20,7 @@ }, "devDependencies": { "@types/google-protobuf": "^3.15.7", - "@types/mocha": "^10.0.1", + "@types/mocha": "^10.0.2", "@types/node": "^20.6.5", "@typescript-eslint/eslint-plugin": "^6.7.2", "@typescript-eslint/parser": "^6.7.2", @@ -50,4 +50,4 @@ ], "recursive": "test/**/*.js" } -} \ No newline at end of file +} diff --git a/test/connectors/initKafka.sh b/test/connectors/initKafka.sh new file mode 100755 index 00000000..9d7cf82d --- /dev/null +++ b/test/connectors/initKafka.sh @@ -0,0 +1,20 @@ +#!/bin/sh +BASEDIR=$(dirname "$0") +cd ${BASEDIR}/../../connectors/kafka + +#start RedPanda +docker-compose up -d + +#Register schema +curl -X POST -H "Content-Type: application/vnd.schemaregistry.v1+json" --data '{"schema": "{\"type\":\"record\",\"name\":\"transaction\",\"namespace\":\"dozer.samples\",\"fields\":[{\"name\":\"id\",\"type\":\"int\"}, {\"name\":\"customer_id\",\"type\":\"int\"},{\"name\":\"amount\",\"type\":\"float\"},{\"name\":\"location\",\"type\":\"string\"},{\"name\":\"provider\",\"type\":\"string\"}]}"}' http://localhost:18081/subjects/transactions-value/versions + + +curl -X POST -H "Content-Type: application/vnd.schemaregistry.v1+json" --data '{"schema": "{\"type\":\"record\",\"name\":\"transactions\",\"namespace\":\"dozer.samples\",\"fields\":[{\"name\":\"id\",\"type\":\"int\"}]}"}' http://localhost:18081/subjects/transactions-key/versions + +#Run producer script +pip install kafka-python +pip install Faker +python producer.py + + + diff --git a/test/connectors/initMySQL.sh b/test/connectors/initMySQL.sh new file mode 100755 index 00000000..b30f4411 --- /dev/null +++ b/test/connectors/initMySQL.sh @@ -0,0 +1,5 @@ +#!/bin/sh +BASEDIR=$(dirname "$0") +cd ${BASEDIR}/../../ + +docker-compose -f ./connectors/mysql/docker-compose.yml up -d \ No newline at end of file diff --git a/test/connectors/initPostgres.sh b/test/connectors/initPostgres.sh new file mode 100755 index 00000000..359f6c89 --- /dev/null +++ b/test/connectors/initPostgres.sh @@ -0,0 +1,7 @@ +#!/bin/sh +BASEDIR=$(dirname "$0") +cd ${BASEDIR}/../../connectors/postgres + +sh download.sh + +docker-compose up -d \ No newline at end of file diff --git a/test/connectors/mysql.ts b/test/connectors/mysql.ts new file mode 100644 index 00000000..965c6ed7 --- /dev/null +++ b/test/connectors/mysql.ts @@ -0,0 +1,30 @@ +import { execSync } from 'child_process'; +import path from 'path'; +import { + assertEndpointsWithRetry, initDozer, +} from '../helper'; + +const TEST_PATH = './connectors/mysql'; + +describe('Connectors: MySQL', async () => { + beforeEach(async () => { + process.chdir('../../'); // go to root + console.log(`Starting directory: ${process.cwd()}`); + + // Download init.sql and setup docker image + execSync(`${__dirname}/initMySQL.sh`, { stdio: 'inherit' }); + + // navigate to test path + const baseDir = path.join(__dirname, '../../'); + const fullPath = path.join(baseDir, TEST_PATH); + process.chdir(fullPath); + execSync('rm -rf .dozer && rm -f dozer.lock', { stdio: 'inherit' 
}); + }); + + it('should run and return API endpoints', async () => { + const dozer = await initDozer(); + await assertEndpointsWithRetry(); + dozer.kill(9); + console.log('Killed dozer mysql'); + }); +}); diff --git a/test/connectors/postgres.ts b/test/connectors/postgres.ts new file mode 100644 index 00000000..7d5f6ec0 --- /dev/null +++ b/test/connectors/postgres.ts @@ -0,0 +1,26 @@ +import { execSync } from 'child_process'; +import { + assertEndpointsWithRetry, initDozer, +} from '../helper'; + +const TEST_PATH = './connectors/postgres'; + +describe('Connectors: Postgres', async () => { + beforeEach(async () => { + process.chdir('../../'); // go to root + console.log(`Starting directory: ${process.cwd()}`); + + // Download init.sql and setup docker image + execSync(`${__dirname}/initPostgres.sh`, { stdio: 'inherit' }); + + process.chdir(TEST_PATH); + execSync('rm -rf .dozer && rm -f dozer.lock', { stdio: 'inherit' }); + }); + + it('should run and return API endpoints', async () => { + const dozer = await initDozer(); + await assertEndpointsWithRetry(); + dozer.kill(9); + console.log('Killed dozer postgres'); + }); +}); diff --git a/test/connectors/runkafka.ts b/test/connectors/runkafka.ts new file mode 100644 index 00000000..83c2b47e --- /dev/null +++ b/test/connectors/runkafka.ts @@ -0,0 +1,27 @@ +import { execSync } from 'child_process'; +import { + assertEndpointsWithRetry, initDozer, +} from '../helper'; + +const TEST_PATH = './connectors/kafka'; + +describe('Connectors: Kafka', async () => { + beforeEach(async () => { + process.chdir('../../'); // go to root + console.log(`Starting directory: ${process.cwd()}`); + + // Download init.sql and setup docker image + execSync(`${__dirname}/initKafka.sh`, { stdio: 'inherit' }); + + process.chdir(TEST_PATH); + execSync('rm -rf .dozer && rm -f dozer.lock', { stdio: 'inherit' }); + }); + + it('should run and return API endpoints', async () => { + const dozer = await initDozer(); + console.log('Dozer started'); + await assertEndpointsWithRetry(); + dozer.kill(9); + console.log('Killed dozer Kafka'); + }); +}); diff --git a/test/helper.ts b/test/helper.ts index 15ec019b..88d04568 100644 --- a/test/helper.ts +++ b/test/helper.ts @@ -51,8 +51,8 @@ export async function checkEndpoints() { } export async function assertEndpointsWithRetry( - maxRetries: number = 10, - retryInterval: number = 500, + maxRetries: number = 60, + retryInterval: number = 1000, ) { let i = 0; let endpointsReady = false; From 9de10340412c45e3195524fe29ea32cae0fac221 Mon Sep 17 00:00:00 2001 From: aaryaattrey Date: Wed, 4 Oct 2023 23:32:18 +0530 Subject: [PATCH 2/3] updated pnpm-lock --- pnpm-lock.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 3538ab56..90b3fc1d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -29,8 +29,8 @@ devDependencies: specifier: ^3.15.7 version: 3.15.7 '@types/mocha': - specifier: ^10.0.1 - version: 10.0.1 + specifier: ^10.0.2 + version: 10.0.2 '@types/node': specifier: ^20.6.5 version: 20.6.5 @@ -303,8 +303,8 @@ packages: resolution: {integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==} dev: true - /@types/mocha@10.0.1: - resolution: {integrity: sha512-/fvYntiO1GeICvqbQ3doGDIP97vWmvFt83GKguJ6prmQM2iXZfFcq6YE8KteFyRtX2/h5Hf91BYvPodJKFYv5Q==} + /@types/mocha@10.0.2: + resolution: {integrity: sha512-NaHL0+0lLNhX6d9rs+NSt97WH/gIlRHmszXbQ/8/MV/eVcFNdeJ/GYhrFuUc8K7WuPhRhTSdMkCp8VMzhUq85w==} dev: true /@types/node@20.6.5: From 
5124b28076e8e04f0bb0790d96952610c2a08f6a Mon Sep 17 00:00:00 2001 From: aaryaattrey Date: Thu, 5 Oct 2023 02:14:58 +0530 Subject: [PATCH 3/3] Remove name field --- connectors/kafka/docker-compose.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/connectors/kafka/docker-compose.yml b/connectors/kafka/docker-compose.yml index 35090b81..839027f2 100644 --- a/connectors/kafka/docker-compose.yml +++ b/connectors/kafka/docker-compose.yml @@ -1,5 +1,4 @@ version: "3.7" -name: redpanda-quickstart networks: redpanda_network: driver: bridge
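
Note on the helper change in patch 1/3: the `test/helper.ts` hunk only shows the retry defaults moving from `maxRetries = 10` / `retryInterval = 500` to `60` / `1000`, i.e. the endpoint check now waits up to roughly a minute instead of five seconds for the Docker-based connectors to come up. For context, below is a minimal sketch of how such a polling helper might look. The loop body and the `checkEndpoints()` stand-in (including its URL) are assumptions for illustration, not the repository's actual implementation; only the function names, parameter names, and the new default values come from the diff above.

```typescript
import { setTimeout as sleep } from 'timers/promises';

// Hypothetical stand-in for the real checkEndpoints() in test/helper.ts,
// whose body is not part of this diff. The port and path are assumptions;
// the real helper may probe the REST and gRPC endpoints differently.
async function checkEndpoints(): Promise<void> {
  const res = await fetch('http://localhost:8080/');
  if (!res.ok) {
    throw new Error(`REST endpoint answered with status ${res.status}`);
  }
}

// Rough shape of assertEndpointsWithRetry() with the defaults introduced by
// this patch: up to 60 attempts, 1000 ms apart, instead of 10 x 500 ms.
export async function assertEndpointsWithRetry(
  maxRetries: number = 60,
  retryInterval: number = 1000,
): Promise<void> {
  let endpointsReady = false;
  for (let i = 0; i < maxRetries && !endpointsReady; i += 1) {
    try {
      await checkEndpoints(); // throws until Dozer is reachable
      endpointsReady = true;
    } catch {
      await sleep(retryInterval); // containers and dozer may still be booting
    }
  }
  if (!endpointsReady) {
    throw new Error(`Endpoints not ready after ${maxRetries} attempts`);
  }
}
```

The longer window matters because each spec first runs an init script that starts Docker Compose (Redpanda, Postgres, or MySQL) and only then launches Dozer, so the first successful probe can legitimately take tens of seconds on a cold CI runner.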