chore(deps): Update CloudQuery monorepo modules (major) #1030
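# Builds the AWS data-resilience dbt models against each database target
# (Postgres, Snowflake, BigQuery) that has a test config checked in under tests/.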
name: "Test AWS Data Resilience Policies"
on:
  pull_request:
    paths:
      - "transformations/aws/data-resilience/**"
      - ".github/workflows/transformations_aws_data_resilience.yml"
      - "transformations/aws/macros/**"
      - "transformations/aws/models/**"
      - "transformations/macros/**"
      - ".github/workflows/wait_for_required_workflows.yml"
      - "scripts/workflows/wait_for_required_workflows.js"
  push:
    branches:
      - main
    paths:
      - "transformations/aws/data-resilience/**"
      - ".github/workflows/transformations_aws_data_resilience.yml"
      - "transformations/aws/macros/**"
      - "transformations/aws/models/**"
      - "transformations/macros/**"
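# Snowflake connection details and the CloudQuery API key, shared by the jobs below.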
env:
  SNOW_USER: ${{ secrets.SNOW_USER }}
  SNOW_PASSWORD: ${{ secrets.SNOW_PASSWORD }}
  # DBT assumes the account is in the form of <account>.<region>
  SNOW_ACCOUNT: "${{ secrets.SNOW_ACCOUNT }}.${{ secrets.SNOW_REGION }}"
  SNOW_WAREHOUSE: ${{ secrets.SNOW_WAREHOUSE }}
  SNOW_DATABASE: ${{ secrets.SNOW_DATABASE }}
  SNOW_SCHEMA: ${{ secrets.SNOW_SCHEMA }}
  SNOW_REGION: ${{ secrets.SNOW_REGION }}
  CLOUDQUERY_API_KEY: ${{ secrets.CLOUDQUERY_API_KEY }}
jobs:
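  # Detect which per-database test configs exist; the test job keys its steps off these outputs.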
  prepare:
    runs-on: ubuntu-latest
    outputs:
      transformation_dir: ${{ fromJson(steps.set-result.outputs.result).transformation_dir }}
      postgres: ${{ fromJson(steps.set-result.outputs.result).postgres }}
      snowflake: ${{ fromJson(steps.set-result.outputs.result).snowflake }}
      bigquery: ${{ fromJson(steps.set-result.outputs.result).bigquery }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - uses: actions/github-script@v7
        id: set-result
        env:
          TRANSFORMATION_DIR: transformations/aws/data-resilience
        with:
          script: |
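            // Probe for each database's test config; a missing file marks that target as false.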
            const fs = require('fs/promises');
            const { TRANSFORMATION_DIR: transformation_dir } = process.env;
            const [postgres, snowflake, bigquery] = await Promise.all([
              fs.access(`${transformation_dir}/tests/postgres.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
              fs.access(`${transformation_dir}/tests/snowflake.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
              fs.access(`${transformation_dir}/tests/bigquery.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
            ]);
            return {
              transformation_dir,
              postgres,
              snowflake,
              bigquery,
            };
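  # Run the migrate + dbt build against every database target the prepare job flagged as present.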
  transformations-aws-data-resilience:
    permissions:
      id-token: 'write'
      contents: 'read'
    name: ${{ needs.prepare.outputs.transformation_dir }}
    needs: prepare
    timeout-minutes: 30
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ${{ needs.prepare.outputs.transformation_dir }}
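    # Disposable Postgres instance that serves as the destination for the Postgres test target.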
    services:
      postgres:
        image: postgres:11
        env:
          POSTGRES_PASSWORD: pass
          POSTGRES_USER: postgres
          POSTGRES_DB: postgres
        ports:
          - 5432:5432
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - name: Checkout
        uses: actions/checkout@v4
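      # OIDC federation via workload identity; only runs when a BigQuery test config exists.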
      - name: Authenticate to Google Cloud
        uses: 'google-github-actions/auth@v2'
        if: needs.prepare.outputs.bigquery == 'true'
        with:
          workload_identity_provider: 'projects/151868820337/locations/global/workloadIdentityPools/integration-test-pool/providers/integration-test-provider'
          service_account: 'integration-service-account@cq-integration-tests.iam.gserviceaccount.com'
      - uses: actions/setup-python@v5
        with:
          python-version: "3.9"
          cache: "pip"
          cache-dependency-path: "${{ needs.prepare.outputs.transformation_dir }}/requirements.txt"
      - name: Install dependencies
        run: pip install -r requirements.txt
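      # Pin the CloudQuery CLI used to create the destination tables (`cloudquery migrate`) in the test steps below.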
      - name: Setup CloudQuery
        uses: cloudquery/setup-cloudquery@v4
        with:
          version: v6.11.0
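      # Each target follows the same pattern: `cloudquery migrate` creates the raw tables, then `dbt run` builds the models on top of them.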
      - name: Test Postgres
        run: |
          cloudquery migrate tests/postgres.yml
          dbt run --target dev-pg --profiles-dir ./tests
        if: needs.prepare.outputs.postgres == 'true'
        env:
          CQ_DSN: postgresql://postgres:pass@localhost:5432/postgres
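      # Same migrate-then-build flow against Snowflake, using a connection string assembled from the SNOW_* secrets.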
      - name: Test Snowflake
        run: |
          cloudquery migrate tests/snowflake.yml
          dbt run --target dev-snowflake --profiles-dir ./tests
        if: needs.prepare.outputs.snowflake == 'true'
        env:
          SNOWFLAKE_CONNECTION_STRING: "${{ secrets.SNOW_USER }}:${{ secrets.SNOW_PASSWORD }}@${{ secrets.SNOW_ACCOUNT }}.${{ secrets.SNOW_REGION }}/${{ secrets.SNOW_DATABASE }}/${{ secrets.SNOW_SCHEMA }}?warehouse=${{ secrets.SNOW_WAREHOUSE }}"
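      # Same flow against BigQuery; credentials come from the Google Cloud auth step above.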
      - name: Test BigQuery
        if: needs.prepare.outputs.bigquery == 'true'
        run: |
          cloudquery migrate tests/bigquery.yml
          dbt run --target dev-bigquery --profiles-dir ./tests
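The `tests/*.yml` specs consumed by `cloudquery migrate` are not part of this file. As a rough sketch only, a Postgres test spec for this transformation would typically pair an AWS source with a `postgresql` destination that reads its connection string from the `CQ_DSN` variable set in the Test Postgres step; the plugin versions and table selection below are placeholders, not values from the repository:

```yaml
# Hypothetical sketch; the actual tests/postgres.yml in the repo may differ.
kind: source
spec:
  name: aws
  path: cloudquery/aws
  version: "vX.Y.Z"            # placeholder version
  destinations: ["postgresql"]
  tables: ["aws_backup_*"]     # placeholder table selection
---
kind: destination
spec:
  name: postgresql
  path: cloudquery/postgresql
  version: "vX.Y.Z"            # placeholder version
  spec:
    connection_string: ${CQ_DSN}  # provided by the workflow step's env
```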