From 78e7f22914aca7fe0d0a972d3363baea051b1991 Mon Sep 17 00:00:00 2001
From: Sagar Pingale <104326818+supergi0@users.noreply.github.com>
Date: Wed, 4 Oct 2023 21:53:47 +0530
Subject: [PATCH] e2e tests for s3,snowflake,supabase and ethereum (#110)

e2e tests for s3,snowflake,supabase and ethereum
---
 .github/workflows/ci.yaml              | 103 +++++++++++++++++++++----
 connectors/aws-s3/dozer-config.yaml    |  13 ++--
 connectors/ethereum/dozer-config.yml   |   1 +
 connectors/snowflake/dozer-config.yaml |   4 -
 connectors/supabase/dozer-config.yaml  |   4 -
 package.json                           |   2 +-
 test/connectors/ethereum.ts            |  27 +++++++
 test/connectors/local-storage.ts       |  13 +++-
 test/connectors/s3.ts                  |  27 +++++++
 test/connectors/snowflake.ts           |  27 +++++++
 test/connectors/supabase.ts            |  27 +++++++
 test/helper.ts                         |   9 ++-
 12 files changed, 219 insertions(+), 38 deletions(-)
 create mode 100644 test/connectors/ethereum.ts
 create mode 100644 test/connectors/s3.ts
 create mode 100644 test/connectors/snowflake.ts
 create mode 100644 test/connectors/supabase.ts

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index e12cdb64..ab78a99d 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -1,4 +1,5 @@
 name: CI
+
 on:
   pull_request:
     branches: [main]
@@ -8,12 +9,77 @@ concurrency:
   cancel-in-progress: true

 jobs:
+  build:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v2
+
+      - name: Install Protoc
+        uses: arduino/setup-protoc@v1
+        with:
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Rust cache
+        uses: swatinem/rust-cache@v2
+
+      - name: Clone Dozer
+        run: |
+          git clone https://github.com/getdozer/dozer.git
+
+      - name: Restore Dozer cache
+        uses: actions/cache@v3
+        with:
+          path: |
+            dozer/
+          key: ${{ runner.os }}-dozer-${{ hashFiles('**/dozer/Cargo.toml') }}
+          restore-keys: |
+            ${{ runner.os }}-dozer-
+
+      - name: Build Dozer binary
+        run: |
+          cd dozer
+          cargo build --features ethereum,snowflake,kafka --bin dozer
+          mkdir -p $GITHUB_WORKSPACE/dozer-bin
+          cp target/debug/dozer $GITHUB_WORKSPACE/dozer-bin/dozer
+
+      - name: Archive Dozer binary
+        uses: actions/upload-artifact@v2
+        with:
+          name: dozer-bin-artifact
+          path: dozer-bin/dozer
+
   test:
     runs-on: ubuntu-latest
+    needs: build
     steps:
-      - uses: actions/checkout@v2
+      - name: Checkout code
+        uses: actions/checkout@v2

-      - uses: pnpm/action-setup@v2
+      - name: Download Dozer binary artifact
+        uses: actions/download-artifact@v2
+        with:
+          name: dozer-bin-artifact
+          path: dozer-bin
+
+      - name: Install Protoc and Snowflake ODBC driver
+        run: |
+          curl -sLO https://github.com/protocolbuffers/protobuf/releases/download/v22.2/protoc-22.2-linux-x86_64.zip
+          unzip protoc-22.2-linux-x86_64.zip -d $HOME/.local
+          export PATH="$PATH:$HOME/.local/bin"
+          curl ${SNOWFLAKE_DRIVER_URL} -o snowflake_driver.deb && sudo dpkg -i snowflake_driver.deb
+        env:
+          SNOWFLAKE_DRIVER_URL: https://sfc-repo.snowflakecomputing.com/odbc/linux/2.25.7/snowflake-odbc-2.25.7.x86_64.deb
+
+      - name: Setup Dozer binary
+        run: |
+          mkdir -p $HOME/.cargo/bin
+          cp dozer-bin/dozer $HOME/.cargo/bin/
+          chmod +x $HOME/.cargo/bin/dozer
+
+      - name: Install pnpm
+        uses: pnpm/action-setup@v2
         with:
           version: 8
           run_install: false
@@ -23,8 +89,8 @@ jobs:
         run: |
           echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV

-      - uses: actions/cache@v3
-        name: Setup pnpm cache
+      - name: Restore pnpm cache
+        uses: actions/cache@v3
         with:
           path: ${{ env.STORE_PATH }}
           key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
@@ -34,16 +100,21 @@ jobs:
       - name: Install dependencies
         run: pnpm install

-      - name: Install Dozer
-        run: |
-          # Install protoc
-          curl -sLO https://github.com/protocolbuffers/protobuf/releases/download/v22.2/protoc-22.2-linux-x86_64.zip
-          unzip protoc-22.2-linux-x86_64.zip -d $HOME/.local
-          export PATH="$PATH:$HOME/.local/bin"
-
-          curl -sLO https://github.com/getdozer/dozer/releases/latest/download/dozer-linux-amd64.deb
-          sudo dpkg -i dozer-linux-amd64.deb
-
       - name: Run tests
-        run: |
-          pnpm test
+        env:
+          AWS_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY }}
+          AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_KEY }}
+          AWS_REGION_S3: ${{ secrets.AWS_REGION_S3 }}
+          AWS_BUCKET_NAME: ${{ secrets.AWS_BUCKET_NAME }}
+          SN_SERVER: ${{ secrets.SN_SERVER }}
+          SN_USER: ${{ secrets.SN_USER }}
+          SN_PASSWORD: ${{ secrets.SN_PASSWORD }}
+          SN_DATABASE: ${{ secrets.SN_DATABASE }}
+          SN_WAREHOUSE: ${{ secrets.SN_WAREHOUSE }}
+          SN_ROLE: ${{ secrets.SN_ROLE }}
+          ETH_WSS_URL: ${{ secrets.ETH_WSS_URL }}
+          PG_HOST: ${{ secrets.PG_HOST }}
+          PG_USER: ${{ secrets.PG_USER }}
+          PG_PASSWORD: ${{ secrets.PG_PASSWORD }}
+          PG_DB: ${{ secrets.PG_DB }}
+        run: pnpm test
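A note on the workflow above: the `Run tests` step reads connector credentials from repository secrets, which are not exposed to pull requests opened from forks. A guard along the following lines — a sketch only, not part of this patch; `skipWithoutEnv` is a hypothetical helper relying on Mocha's global `before` hook and `this.skip()` — would let a connector suite skip cleanly instead of failing when its variables are missing:

```ts
// Hypothetical helper, not in this repository: skip the enclosing Mocha suite
// when required connector credentials are absent from the environment.
export function skipWithoutEnv(requiredVars: string[]): void {
  before(function () {
    const missing = requiredVars.filter((name) => !process.env[name]);
    if (missing.length > 0) {
      console.log(`Skipping suite, missing env vars: ${missing.join(', ')}`);
      this.skip(); // marks every test in the enclosing suite as pending
    }
  });
}

// Example usage inside a spec:
// describe('Connectors: Snowflake', () => {
//   skipWithoutEnv(['SN_SERVER', 'SN_USER', 'SN_PASSWORD', 'SN_DATABASE']);
//   ...
// });
```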
diff --git a/connectors/aws-s3/dozer-config.yaml b/connectors/aws-s3/dozer-config.yaml
index a0a652cb..a81667e9 100644
--- a/connectors/aws-s3/dozer-config.yaml
+++ b/connectors/aws-s3/dozer-config.yaml
@@ -1,17 +1,17 @@
 app_name: aws-s3-sample
 version: 1
 connections:
-  - config : !S3Storage
+  - config: !S3Storage
       details:
-        access_key_id: {{YOUR_ACCESS_KEY}}
-        secret_access_key: {{YOUR_SECRET_KEY}}
-        region: {{YOUR_REGION}}
-        bucket_name: {{YOUR_BUCKET_NAME}}
+        access_key_id: "{{AWS_ACCESS_KEY}}"
+        secret_access_key: "{{AWS_SECRET_KEY}}"
+        region: "{{AWS_REGION_S3}}"
+        bucket_name: "{{AWS_BUCKET_NAME}}"
       tables:
         - !Table
           name: stocks
           config: !CSV
-            path: . # path to files or folder inside a bucket
+            path: stock_data_monthly_100_rows # Change this to stock_data_monthly
             extension: .csv
     name: s3

@@ -62,6 +62,5 @@ endpoints:
   - name: lowest_daily_close
     path: /analysis/lowest_daily_close
     table_name: lowest_daily_close
-
 # telemetry:
 #   metrics: !Prometheus # You can check the metrics at http://localhost:9000

diff --git a/connectors/ethereum/dozer-config.yml b/connectors/ethereum/dozer-config.yml
index 2d3e38fe..5863ace2 100644
--- a/connectors/ethereum/dozer-config.yml
+++ b/connectors/ethereum/dozer-config.yml
@@ -1,4 +1,5 @@
 app_name: eth-smartcontracts-sample
+version: 1
 connections:
   - config: !Ethereum
       provider: !Log

diff --git a/connectors/snowflake/dozer-config.yaml b/connectors/snowflake/dozer-config.yaml
index e6ad1c3e..d3e45776 100644
--- a/connectors/snowflake/dozer-config.yaml
+++ b/connectors/snowflake/dozer-config.yaml
@@ -57,7 +57,3 @@ endpoints:
     primary_key:
       - C_CUSTKEY
       - C_NAME
-
-api:
-  app_grpc:
-    host: "{{APP_HOST}}"

diff --git a/connectors/supabase/dozer-config.yaml b/connectors/supabase/dozer-config.yaml
index 74f97569..41cfbed0 100644
--- a/connectors/supabase/dozer-config.yaml
+++ b/connectors/supabase/dozer-config.yaml
@@ -22,7 +22,3 @@ endpoints:
   - name: zone_data
     path: /zone_data
     table_name: zone_data
-
-api:
-  app_grpc:
-    host: "{{APP_HOST}}"
\ No newline at end of file

diff --git a/package.json b/package.json
index e0f5d47d..581e12fa 100644
--- a/package.json
+++ b/package.json
@@ -2,7 +2,7 @@
   "name": "dozer-samples",
   "version": "1.0.0",
   "scripts": {
-    "test": "mocha -r ts-node/register test/**/*.ts",
+    "test": "mocha -r ts-node/register test/**/*.ts --timeout 60000 --exit",
     "lint": "eslint test/**/*.js --quiet",
     "lint:fix": "eslint test/**/*.js --fix --quiet",
     "generate:grpc": "node ./test/scripts/proto.js"
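The config placeholders above now use the same names as the secrets exported by the CI job (`AWS_ACCESS_KEY`, `SN_SERVER`, `PG_HOST`, ...), which suggests Dozer resolves the `{{VAR}}` placeholders from the environment at startup. Purely as an illustration of that convention — this helper is not part of the patch and `renderConfig` is a made-up name — the substitution amounts to:

```ts
import { readFileSync } from 'fs';

// Illustration only (not part of the patch): expand {{VAR}} placeholders in a
// dozer-config.yaml from process.env, mirroring the naming convention above.
export function renderConfig(configPath: string): string {
  const raw = readFileSync(configPath, 'utf8');
  return raw.replace(/\{\{(\w+)\}\}/g, (_match, name: string) => {
    const value = process.env[name];
    if (value === undefined) {
      throw new Error(`Missing environment variable: ${name}`);
    }
    return value;
  });
}

// Example: renderConfig('connectors/aws-s3/dozer-config.yaml') returns the YAML
// with AWS_ACCESS_KEY, AWS_SECRET_KEY, AWS_REGION_S3 and AWS_BUCKET_NAME filled in.
```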
diff --git a/test/connectors/ethereum.ts b/test/connectors/ethereum.ts
new file mode 100644
index 00000000..89a7a1ec
--- /dev/null
+++ b/test/connectors/ethereum.ts
@@ -0,0 +1,27 @@
+import { execSync } from 'child_process';
+import path from 'path';
+
+import {
+  assertEndpointsWithRetry, initDozer,
+} from '../helper';
+
+const TEST_PATH = './connectors/ethereum';
+
+describe('Connectors: ethereum', async () => {
+  beforeEach(async () => {
+    const baseDir = path.join(__dirname, '../../');
+    const fullPath = path.join(baseDir, TEST_PATH);
+    process.chdir(fullPath);
+
+    console.log(`Current directory: ${process.cwd()}`);
+
+    execSync('rm -rf .dozer && rm -f dozer.lock', { stdio: 'inherit' });
+  });
+
+  it('should run and return API endpoints', async () => {
+    const dozer = await initDozer();
+    await assertEndpointsWithRetry();
+    dozer.kill();
+    console.log('Killed Dozer Ethereum');
+  });
+});

diff --git a/test/connectors/local-storage.ts b/test/connectors/local-storage.ts
index a6b62e67..4f11d0ec 100644
--- a/test/connectors/local-storage.ts
+++ b/test/connectors/local-storage.ts
@@ -1,4 +1,5 @@
 import { execSync } from 'child_process';
+import path from 'path';
 import {
   assertEndpointsWithRetry, initDozer,
 } from '../helper';
@@ -7,18 +8,22 @@ const TEST_PATH = './connectors/local-storage';

 describe('Connectors: Local Storage', async () => {
   beforeEach(async () => {
-    console.log(`Starting directory: ${process.cwd()}`);
-
     // Copy test data to local-storage connector
     execSync(`${__dirname}/init.sh`, { stdio: 'inherit' });

-    process.chdir(TEST_PATH);
+    const baseDir = path.join(__dirname, '../../');
+    const fullPath = path.join(baseDir, TEST_PATH);
+    process.chdir(fullPath);
+
+    console.log(`Current directory: ${process.cwd()}`);
+
+    execSync('rm -rf .dozer && rm -f dozer.lock', { stdio: 'inherit' });
   });

   it('should run and return API endpoints', async () => {
     const dozer = await initDozer();
     await assertEndpointsWithRetry();
-    dozer.kill(9);
+    dozer.kill();
+    console.log('Killed Dozer Local Storage');
   });
 });

diff --git a/test/connectors/s3.ts b/test/connectors/s3.ts
new file mode 100644
index 00000000..041748ed
--- /dev/null
+++ b/test/connectors/s3.ts
@@ -0,0 +1,27 @@
+import { execSync } from 'child_process';
+import path from 'path';
+
+import {
+  assertEndpointsWithRetry, initDozer,
+} from '../helper';
+
+const TEST_PATH = './connectors/aws-s3';
+
+describe('Connectors: aws-s3', async () => {
+  beforeEach(async () => {
+    const baseDir = path.join(__dirname, '../../');
+    const fullPath = path.join(baseDir, TEST_PATH);
+    process.chdir(fullPath);
+
+    console.log(`Current directory: ${process.cwd()}`);
+
+    execSync('rm -rf .dozer && rm -f dozer.lock', { stdio: 'inherit' });
+  });
+
+  it('should run and return API endpoints', async () => {
+    const dozer = await initDozer();
+    await assertEndpointsWithRetry();
+    dozer.kill();
+    console.log('Killed Dozer aws-s3');
+  });
+});

diff --git a/test/connectors/snowflake.ts b/test/connectors/snowflake.ts
new file mode 100644
index 00000000..efcb02c3
--- /dev/null
+++ b/test/connectors/snowflake.ts
@@ -0,0 +1,27 @@
+import { execSync } from 'child_process';
+import path from 'path';
+
+import {
+  assertEndpointsWithRetry, initDozer,
+} from '../helper';
+
+const TEST_PATH = './connectors/snowflake';
+
+describe('Connectors: Snowflake', async () => {
+  beforeEach(async () => {
+    const baseDir = path.join(__dirname, '../../');
+    const fullPath = path.join(baseDir, TEST_PATH);
+    process.chdir(fullPath);
+
+    console.log(`Current directory: ${process.cwd()}`);
+
+    execSync('rm -rf .dozer && rm -f dozer.lock', { stdio: 'inherit' });
+  });
+
+  it('should run and return API endpoints', async () => {
+    const dozer = await initDozer();
+    await assertEndpointsWithRetry();
+    dozer.kill();
+    console.log('Killed Dozer Snowflake');
+  });
+});
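The new connector specs above differ only in their connector directory and log label. As a refactoring sketch only — `describeConnector` does not exist in this patch — the duplication could be folded into one factory that reuses the patch's own `initDozer` and `assertEndpointsWithRetry` helpers (assuming the file sits next to the specs in `test/connectors/`):

```ts
import { execSync } from 'child_process';
import path from 'path';

import { assertEndpointsWithRetry, initDozer } from '../helper';

// Refactoring sketch only; `describeConnector` is not part of this patch.
export function describeConnector(label: string, connectorPath: string): void {
  describe(`Connectors: ${label}`, () => {
    beforeEach(() => {
      const fullPath = path.join(__dirname, '../../', connectorPath);
      process.chdir(fullPath);
      console.log(`Current directory: ${process.cwd()}`);

      // Clear state left behind by a previous run
      execSync('rm -rf .dozer && rm -f dozer.lock', { stdio: 'inherit' });
    });

    it('should run and return API endpoints', async () => {
      const dozer = await initDozer();
      await assertEndpointsWithRetry();
      dozer.kill();
      console.log(`Killed Dozer ${label}`);
    });
  });
}

// e.g. test/connectors/s3.ts would reduce to:
// describeConnector('aws-s3', './connectors/aws-s3');
```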
diff --git a/test/connectors/supabase.ts b/test/connectors/supabase.ts
new file mode 100644
index 00000000..6d979ee0
--- /dev/null
+++ b/test/connectors/supabase.ts
@@ -0,0 +1,27 @@
+import { execSync } from 'child_process';
+import path from 'path';
+
+import {
+  assertEndpointsWithRetry, initDozer,
+} from '../helper';
+
+const TEST_PATH = './connectors/supabase';
+
+describe('Connectors: Supabase', async () => {
+  beforeEach(async () => {
+    const baseDir = path.join(__dirname, '../../');
+    const fullPath = path.join(baseDir, TEST_PATH);
+    process.chdir(fullPath);
+
+    console.log(`Current directory: ${process.cwd()}`);
+
+    execSync('rm -rf .dozer && rm -f dozer.lock', { stdio: 'inherit' });
+  });
+
+  it('should run and return API endpoints', async () => {
+    const dozer = await initDozer();
+    await assertEndpointsWithRetry();
+    dozer.kill();
+    console.log('Killed Dozer Supabase');
+  });
+});

diff --git a/test/helper.ts b/test/helper.ts
index 15ec019b..20c06fe4 100644
--- a/test/helper.ts
+++ b/test/helper.ts
@@ -33,6 +33,10 @@ export class CommonGrpc {
   ): Promise {
     return promisify(this.client.getEndpoints.bind(this.client, params, metadata, {}))();
   }
+
+  public close() {
+    this.client.close();
+  }
 }

 export async function checkEndpoints() {
@@ -43,6 +47,7 @@ export async function checkEndpoints() {
     const { endpoints } = endpointsRes;
     console.log(endpoints);
     assert(endpoints.length > 0);
+    client.close();
     return true;
   } catch (err) {
     console.log('dozer is not up yet, retrying...');
@@ -51,8 +56,8 @@ export async function checkEndpoints() {
 }

 export async function assertEndpointsWithRetry(
-  maxRetries: number = 10,
-  retryInterval: number = 500,
+  maxRetries: number = 100,
+  retryInterval: number = 1000,
 ) {
   let i = 0;
   let endpointsReady = false;
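The last hunk only shows the head of `assertEndpointsWithRetry`; its loop body lies outside the diff context. Consistent with the signature shown and with `checkEndpoints()` resolving to true once Dozer reports its endpoints, the retry logic presumably looks roughly like this sketch (the `Sketch` suffix marks it as a reconstruction, not the repository's code; it assumes a file sitting alongside `test/helper.ts`):

```ts
import { checkEndpoints } from './helper';

// Sketch only, reconstructed from the signature above; not the repository's code.
export async function assertEndpointsWithRetrySketch(
  maxRetries: number = 100,
  retryInterval: number = 1000,
): Promise<void> {
  let endpointsReady = false;

  for (let i = 0; i < maxRetries && !endpointsReady; i += 1) {
    endpointsReady = Boolean(await checkEndpoints());
    if (!endpointsReady) {
      // With the new defaults this polls up to 100 times, one second apart.
      await new Promise((resolve) => setTimeout(resolve, retryInterval));
    }
  }

  if (!endpointsReady) {
    throw new Error(`Dozer endpoints were not ready after ${maxRetries} attempts`);
  }
}
```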