
Commit

Merge pull request #4 from datacoves/feature/mayrapena1324
feature/mayrapena1324
mayrapena1324 authored Nov 12, 2024
2 parents f0d3f2f + e14cac4 commit eeee5e8
Showing 7 changed files with 28 additions and 25 deletions.
4 changes: 3 additions & 1 deletion .github/workflows/pull_request_build.yml
@@ -91,7 +91,9 @@ jobs:
         run: "dbt-coves generate docs --merge-deferred --state logs"
 
       - name: Run governance checks
-        run: "pre-commit run --from-ref origin/${{ github.event.pull_request.base.ref }} --to-ref HEAD"
+        # run: "pre-commit run --from-ref origin/${{ github.event.pull_request.base.ref }} --to-ref HEAD"
+        run: "pre-commit run --all"
+
 
       ##### Real dbt run given that we passed governance checks
       - name: Run dbt build slim mode
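A note on the governance step above: the previous command (now kept as a comment) scoped pre-commit to the files that changed between the PR base ref and HEAD, while the replacement drops that scoping. A minimal sketch of the two variants as workflow steps, assuming a checkout with enough history to resolve the base ref and using pre-commit's documented --all-files flag for the unscoped run; the step names here are illustrative, not the repository's:

      # Sketch only: two ways to invoke the governance hooks in a workflow step.
      # Scoped run: only files changed between the PR base branch and HEAD.
      - name: Run governance checks (changed files only)
        run: "pre-commit run --from-ref origin/${{ github.event.pull_request.base.ref }} --to-ref HEAD"

      # Unscoped run: every hook against every file in the repository.
      - name: Run governance checks (all files)
        run: "pre-commit run --all-files"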
1 change: 0 additions & 1 deletion .github/workflows/push-to-main.yml
@@ -160,4 +160,3 @@ jobs:
 
       - name: Drop PR database
         run: "dbt --no-write-json run-operation drop_recreate_db --args '{db_name: ${{env.DATACOVES__MAIN__DATABASE}}, recreate: False}'" # yamllint disable-line rule:line-length
-
3 changes: 1 addition & 2 deletions .pre-commit-config.yaml
@@ -11,8 +11,7 @@ repos:
       - id: check-script-ref-and-source
       - id: check-model-has-description
       - id: check-model-has-properties-file
-      # - id: check-model-has-all-columns
-      # - id: check-database-casing-consistency
+      - id: check-model-has-all-columns
         always_run: true
 
   - repo: https://github.com/sqlfluff/sqlfluff
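For reference, check-model-has-all-columns is the dbt-checkpoint hook that fails when a model has columns in the warehouse that are not declared in its properties file. A hedged sketch of a properties entry that would satisfy it for a hypothetical two-column staging model (model and column names are invented for illustration):

# Illustrative only: model and column names are hypothetical; the point is that
# every column present in the warehouse must be listed for the hook to pass.
version: 2

models:
  - name: stg_country_populations
    description: "Staged country population figures"
    columns:
      - name: country_code
        description: "ISO country code"
      - name: population
        description: "Population for the reporting year"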
6 changes: 3 additions & 3 deletions orchestrate/dags/sample_dag.py
@@ -16,10 +16,10 @@
     tags=["version_1"],
     catchup=False,
 )
-def daily_run():
+def sample_dag():
     run_dbt = DatacovesDbtOperator(
-        task_id="run_dbt", bash_command="dbt source freshness && dbt build"
+        task_id="run_dbt", bash_command="dbt debug"
     )
 
 
-dag = daily_run()
+dag = sample_dag()
2 changes: 1 addition & 1 deletion
@@ -14,4 +14,4 @@ nodes:
   run_dbt:
     type: task
     operator: operators.datacoves.dbt.DatacovesDbtOperator
-    bash_command: "dbt source freshness && dbt build"
+    bash_command: "dbt debug"
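For orientation, the run_dbt node above sits under a top-level nodes: mapping in a YAML DAG definition used for Airflow DAG generation. A rough sketch of what a full definition might look like; everything outside the nodes: block (description, schedule, default_args) is an assumption for illustration, not this repository's actual file:

# Hedged sketch of a YAML DAG definition; only the nodes: block is taken from the diff above.
description: "Sample DAG running dbt"          # hypothetical
schedule_interval: "0 1 * * *"                 # hypothetical
tags:
  - version_1
default_args:
  start_date: 2024-01-01                       # hypothetical
  owner: analytics-team                        # hypothetical
catchup: false

nodes:
  run_dbt:
    type: task
    operator: operators.datacoves.dbt.DatacovesDbtOperator
    bash_command: "dbt debug"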
35 changes: 18 additions & 17 deletions transform/.dbt_coves/config.yml
@@ -24,20 +24,20 @@ generate:
 
     # UNCOMMENT THE FOLLOWING LINES TO ENABLE AIRFLOW DAGS GENERATION
     # BASED ON AIRBYTE AND FIVETRAN CONNECTIONS
-    # generators_params:
-    #   AirbyteDbtGenerator:
-    #     host: "{{ env_var('DATACOVES__AIRBYTE_HOST_NAME') }}"
-    #     port: "{{ env_var('DATACOVES__AIRBYTE_PORT') }}"
-    #     airbyte_conn_id: airbyte_connection
+    generators_params:
+      AirbyteDbtGenerator:
+        host: "{{ env_var('DATACOVES__AIRBYTE_HOST_NAME') }}"
+        port: "{{ env_var('DATACOVES__AIRBYTE_PORT') }}"
+        airbyte_conn_id: airbyte_connection
 
-    #     dbt_project_path: "{{ env_var('DATACOVES__DBT_HOME') }}"
-    #     run_dbt_compile: false
-    #     run_dbt_deps: false
+        dbt_project_path: "{{ env_var('DATACOVES__DBT_HOME') }}"
+        run_dbt_compile: false
+        run_dbt_deps: false
 
-    #   AirbyteGenerator:
-    #     host: "{{ env_var('DATACOVES__AIRBYTE_HOST_NAME') }}"
-    #     port: "{{ env_var('DATACOVES__AIRBYTE_PORT') }}"
-    #     airbyte_conn_id: airbyte_connection
+      AirbyteGenerator:
+        host: "{{ env_var('DATACOVES__AIRBYTE_HOST_NAME') }}"
+        port: "{{ env_var('DATACOVES__AIRBYTE_PORT') }}"
+        airbyte_conn_id: airbyte_connection
 
     #   FivetranDbtGenerator:
     #     api_key: "{{ env_var('DATACOVES__FIVETRAN_API_KEY') }}"
@@ -55,11 +55,11 @@
     #     - fivetran-connection-id-2
 
 # UNCOMMENT THE FOLLOWING LINES TO ENABLE AIRBYTE EXTRACTION
-# extract:
-#   airbyte:
-#     path: /config/workspace/load/airbyte
-#     host: "{{ env_var('DATACOVES__AIRBYTE_HOST_NAME') }}"
-#     port: "{{ env_var('DATACOVES__AIRBYTE_PORT') }}"
+extract:
+  airbyte:
+    path: /config/workspace/load/airbyte
+    host: "{{ env_var('DATACOVES__AIRBYTE_HOST_NAME') }}"
+    port: "{{ env_var('DATACOVES__AIRBYTE_PORT') }}"
 
 # fivetran:
 #   path: /config/workspace/extract/fivetran
@@ -74,6 +74,7 @@ generate:
 #     path: /config/workspace/load/fivetran
 #     run_connection_tests: true
 
+
 blue_green:
   prod_db_env_var: DATACOVES__MAIN__DATABASE # This holds the name of the production database --- no default
   # staging_database: STAGING_DB # Optional name you want to give to the staging database --- no default
2 changes: 2 additions & 0 deletions transform/models/L1_staging/country_data/_country_data.yml
@@ -3,6 +3,8 @@ version: 2
 sources:
   - name: COUNTRY_DATA
     database: RAW
+    tags:
+      - daily_run_airbyte
     tables:
       - name: COUNTRY_POPULATIONS
         description: 'Raw population information from Github Datasets repository'

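The new daily_run_airbyte tag on the COUNTRY_DATA source makes that source and its downstream models selectable by tag. A hedged sketch, not part of this commit, of a YAML DAG node that could use dbt's tag selector to pick them up; the node name is hypothetical, while the operator and node shape mirror the sample_dag definition changed above:

# Illustrative sketch only: node name is hypothetical.
nodes:
  build_airbyte_sourced_models:
    type: task
    operator: operators.datacoves.dbt.DatacovesDbtOperator
    # Check freshness of sources tagged daily_run_airbyte, then build their descendants.
    bash_command: "dbt source freshness --select tag:daily_run_airbyte && dbt build -s tag:daily_run_airbyte+"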