diff --git a/.bumpversion.cfg b/.bumpversion.cfg index b9422be0685..cb713915504 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.8.0b3 +current_version = 1.9.0a1 parse = (?P[\d]+) # major version number \.(?P[\d]+) # minor version number \.(?P[\d]+) # patch version number diff --git a/.changes/1.8.0-b1.md b/.changes/1.8.0-b1.md deleted file mode 100644 index 069ae318427..00000000000 --- a/.changes/1.8.0-b1.md +++ /dev/null @@ -1,187 +0,0 @@ -## dbt-core 1.8.0-b1 - February 28, 2024 - -### Breaking Changes - -- Remove adapter.get_compiler interface ([#9148](https://github.com/dbt-labs/dbt-core/issues/9148)) -- Move AdapterLogger to adapters folder ([#9151](https://github.com/dbt-labs/dbt-core/issues/9151)) -- Rm --dry-run flag from 'dbt deps --add-package', in favor of just 'dbt deps --lock' ([#9100](https://github.com/dbt-labs/dbt-core/issues/9100)) -- move event manager setup back to core, remove ref to global EVENT_MANAGER and clean up event manager functions ([#9150](https://github.com/dbt-labs/dbt-core/issues/9150)) -- Remove dbt-tests-adapter and dbt-postgres packages from dbt-core ([#9455](https://github.com/dbt-labs/dbt-core/issues/9455)) - -### Features - -- Initial implementation of unit testing ([#8287](https://github.com/dbt-labs/dbt-core/issues/8287)) -- Unit test manifest artifacts and selection ([#8295](https://github.com/dbt-labs/dbt-core/issues/8295)) -- Support config with tags & meta for unit tests ([#8294](https://github.com/dbt-labs/dbt-core/issues/8294)) -- Allow adapters to include package logs in dbt standard logging ([#7859](https://github.com/dbt-labs/dbt-core/issues/7859)) -- Enable inline csv fixtures in unit tests ([#8626](https://github.com/dbt-labs/dbt-core/issues/8626)) -- Add drop_schema_named macro ([#8025](https://github.com/dbt-labs/dbt-core/issues/8025)) -- migrate utils to common and adapters folders ([#8924](https://github.com/dbt-labs/dbt-core/issues/8924)) -- Move Agate 
helper client into common ([#8926](https://github.com/dbt-labs/dbt-core/issues/8926)) -- remove usage of dbt.config.PartialProject from dbt/adapters ([#8928](https://github.com/dbt-labs/dbt-core/issues/8928)) -- Add exports to SavedQuery spec ([#8892](https://github.com/dbt-labs/dbt-core/issues/8892)) -- Support unit testing incremental models ([#8422](https://github.com/dbt-labs/dbt-core/issues/8422)) -- Add support of csv file fixtures to unit testing ([#8290](https://github.com/dbt-labs/dbt-core/issues/8290)) -- Remove legacy logger ([#8027](https://github.com/dbt-labs/dbt-core/issues/8027)) -- Unit tests support --defer and state:modified ([#8517](https://github.com/dbt-labs/dbt-core/issues/8517)) -- Support setting export configs hierarchically via saved query and project configs ([#8956](https://github.com/dbt-labs/dbt-core/issues/8956)) -- Support source inputs in unit tests ([#8507](https://github.com/dbt-labs/dbt-core/issues/8507)) -- Use daff to render diff displayed in stdout when unit test fails ([#8558](https://github.com/dbt-labs/dbt-core/issues/8558)) -- Move unit testing to test command ([#8979](https://github.com/dbt-labs/dbt-core/issues/8979)) -- Support --empty flag for schema-only dry runs ([#8971](https://github.com/dbt-labs/dbt-core/issues/8971)) -- Support unit tests in non-root packages ([#8285](https://github.com/dbt-labs/dbt-core/issues/8285)) -- Convert the `tests` config to `data_tests` in both dbt_project.yml and schema files. in schema files. ([#8699](https://github.com/dbt-labs/dbt-core/issues/8699)) -- Make fixture files full-fledged parts of the manifest and enable partial parsing ([#9067](https://github.com/dbt-labs/dbt-core/issues/9067)) -- Adds support for parsing conversion metric related properties for the semantic layer. 
([#9203](https://github.com/dbt-labs/dbt-core/issues/9203)) -- In build command run unit tests before models ([#9128](https://github.com/dbt-labs/dbt-core/issues/9128)) -- Move flags from UserConfig in profiles.yml to flags in dbt_project.yml ([#9183](https://github.com/dbt-labs/dbt-core/issues/9183)) -- Added hook support for `dbt source freshness` ([#5609](https://github.com/dbt-labs/dbt-core/issues/5609)) -- Align with order of unit test output when `actual` differs from `expected` ([#9370](https://github.com/dbt-labs/dbt-core/issues/9370)) -- Added support for external nodes in unit test nodes ([#8944](https://github.com/dbt-labs/dbt-core/issues/8944)) -- Enable unit testing versioned models ([#9344](https://github.com/dbt-labs/dbt-core/issues/9344)) -- Enable list command for unit tests ([#8508](https://github.com/dbt-labs/dbt-core/issues/8508)) -- Integration Test Optimizations ([#9498](https://github.com/dbt-labs/dbt-core/issues/9498)) -- Accelerate integration tests with caching. 
([#9498](https://github.com/dbt-labs/dbt-core/issues/9498)) -- Cache environment variables ([#9489](https://github.com/dbt-labs/dbt-core/issues/9489)) -- Support meta at the config level for Metric nodes ([#9441](https://github.com/dbt-labs/dbt-core/issues/9441)) -- Add cache to SavedQuery config ([#9540](https://github.com/dbt-labs/dbt-core/issues/9540)) - -### Fixes - -- For packages installed with tarball method, fetch metadata to resolve nested dependencies ([#8621](https://github.com/dbt-labs/dbt-core/issues/8621)) -- Fix partial parsing not working for semantic model change ([#8859](https://github.com/dbt-labs/dbt-core/issues/8859)) -- Handle unknown `type_code` for model contracts ([#8877](https://github.com/dbt-labs/dbt-core/issues/8877), [#8353](https://github.com/dbt-labs/dbt-core/issues/8353)) -- Rework get_catalog implementation to retain previous adapter interface semantics ([#8846](https://github.com/dbt-labs/dbt-core/issues/8846)) -- Add back contract enforcement for temporary tables on postgres ([#8857](https://github.com/dbt-labs/dbt-core/issues/8857)) -- Add version to fqn when version==0 ([#8836](https://github.com/dbt-labs/dbt-core/issues/8836)) -- Fix cased comparison in catalog-retrieval function. ([#8939](https://github.com/dbt-labs/dbt-core/issues/8939)) -- Catalog queries now assign the correct type to materialized views ([#8864](https://github.com/dbt-labs/dbt-core/issues/8864)) -- Fix compilation exception running empty seed file and support new Integer agate data_type ([#8895](https://github.com/dbt-labs/dbt-core/issues/8895)) -- Make relation filtering None-tolerant for maximal flexibility across adapters. 
([#8974](https://github.com/dbt-labs/dbt-core/issues/8974)) -- Update run_results.json from previous versions of dbt to support deferral and rerun from failure ([#9010](https://github.com/dbt-labs/dbt-core/issues/9010)) -- Use MANIFEST.in to recursively include all jinja templates; fixes issue where some templates were not included in the distribution ([#9016](https://github.com/dbt-labs/dbt-core/issues/9016)) -- Fix git repository with subdirectory for Deps ([#9000](https://github.com/dbt-labs/dbt-core/issues/9000)) -- Use seed file from disk for unit testing if rows not specified in YAML config ([#8652](https://github.com/dbt-labs/dbt-core/issues/8652)) -- Fix formatting of tarball information in packages-lock.yml ([#9062](https://github.com/dbt-labs/dbt-core/issues/9062)) -- deps: Lock git packages to commit SHA during resolution ([#9050](https://github.com/dbt-labs/dbt-core/issues/9050)) -- deps: Use PackageRenderer to read package-lock.json ([#9127](https://github.com/dbt-labs/dbt-core/issues/9127)) -- Ensure we produce valid jsonschema schemas for manifest, catalog, run-results, and sources ([#8991](https://github.com/dbt-labs/dbt-core/issues/8991)) -- Get sources working again in dbt docs generate ([#9119](https://github.com/dbt-labs/dbt-core/issues/9119)) -- Fix parsing f-strings in python models ([#6976](https://github.com/dbt-labs/dbt-core/issues/6976)) -- Preserve the value of vars and the --full-refresh flags when using retry. 
([#9112](https://github.com/dbt-labs/dbt-core/issues/9112)) -- Support reasonably long unit test names ([#9015](https://github.com/dbt-labs/dbt-core/issues/9015)) -- Fix back-compat parsing for model-level 'tests', source table-level 'tests', and 'tests' defined on model versions ([#9411](https://github.com/dbt-labs/dbt-core/issues/9411)) -- Fix retry command run from CLI ([#9444](https://github.com/dbt-labs/dbt-core/issues/9444)) -- Fix seed and source selection in `dbt docs generate` ([#9161](https://github.com/dbt-labs/dbt-core/issues/9161)) -- Add TestGenerateCatalogWithExternalNodes, include empty nodes in node selection during docs generate ([#9456](https://github.com/dbt-labs/dbt-core/issues/9456)) -- Fix node type plurals in FoundStats log message ([#9464](https://github.com/dbt-labs/dbt-core/issues/9464)) -- Run manifest upgrade preprocessing on any older manifest version, including v11 ([#9487](https://github.com/dbt-labs/dbt-core/issues/9487)) -- Update 'compiled_code' context member logic to route based on command ('clone' or not). Reimplement 'sql' context member as wrapper of 'compiled_code'. ([#9502](https://github.com/dbt-labs/dbt-core/issues/9502)) -- Fix bug where Semantic Layer filter strings are parsed into lists. ([#9507](https://github.com/dbt-labs/dbt-core/issues/9507)) -- Initialize invocation context before test fixtures are built. 
([##9489](https://github.com/dbt-labs/dbt-core/issues/#9489)) -- When patching versioned models, set constraints after config ([#9364](https://github.com/dbt-labs/dbt-core/issues/9364)) -- only include unmodified semantic mdodels in state:modified selection ([#9548](https://github.com/dbt-labs/dbt-core/issues/9548)) -- Set query headers when manifest is passed in to dbtRunner ([#9546](https://github.com/dbt-labs/dbt-core/issues/9546)) -- Store node_info in node associated logging events ([#9557](https://github.com/dbt-labs/dbt-core/issues/9557)) -- Fix Semantic Model Compare node relations ([#9548](https://github.com/dbt-labs/dbt-core/issues/9548)) -- Clearer no-op logging in stubbed SavedQueryRunner ([#9533](https://github.com/dbt-labs/dbt-core/issues/9533)) -- Fix node_info contextvar handling so incorrect node_info doesn't persist ([#8866](https://github.com/dbt-labs/dbt-core/issues/8866)) -- Add target-path to retry ([#8948](https://github.com/dbt-labs/dbt-core/issues/8948)) - -### Docs - -- fix get_custom_database docstring ([dbt-docs/#9003](https://github.com/dbt-labs/dbt-docs/issues/9003)) - -### Under the Hood - -- Added more type annotations. 
([#8537](https://github.com/dbt-labs/dbt-core/issues/8537)) -- Add unit testing functional tests ([#8512](https://github.com/dbt-labs/dbt-core/issues/8512)) -- Remove usage of dbt.include.global_project in dbt/adapters ([#8925](https://github.com/dbt-labs/dbt-core/issues/8925)) -- Add a no-op runner for Saved Qeury ([#8893](https://github.com/dbt-labs/dbt-core/issues/8893)) -- remove dbt.flags.MP_CONTEXT usage in dbt/adapters ([#8967](https://github.com/dbt-labs/dbt-core/issues/8967)) -- Remove usage of dbt.flags.LOG_CACHE_EVENTS in dbt/adapters ([#8969](https://github.com/dbt-labs/dbt-core/issues/8969)) -- Move CatalogRelationTypes test case to the shared test suite to be reused by adapter maintainers ([#8952](https://github.com/dbt-labs/dbt-core/issues/8952)) -- Treat SystemExit as an interrupt if raised during node execution. ([#n/a](https://github.com/dbt-labs/dbt-core/issues/n/a)) -- Removing unused 'documentable' ([#8871](https://github.com/dbt-labs/dbt-core/issues/8871)) -- Remove use of dbt/core exceptions in dbt/adapter ([#8920](https://github.com/dbt-labs/dbt-core/issues/8920)) -- Cache dbt plugin modules to improve integration test performance ([#9029](https://github.com/dbt-labs/dbt-core/issues/9029)) -- Consolidate deferral methods & flags ([#7965](https://github.com/dbt-labs/dbt-core/issues/7965), [#8715](https://github.com/dbt-labs/dbt-core/issues/8715)) -- Fix test_current_timestamp_matches_utc test; allow for MacOS runner system clock variance ([#9057](https://github.com/dbt-labs/dbt-core/issues/9057)) -- Remove usage of dbt.deprecations in dbt/adapters, enable core & adapter-specific event types and protos ([#8927](https://github.com/dbt-labs/dbt-core/issues/8927), [#8918](https://github.com/dbt-labs/dbt-core/issues/8918)) -- Clean up unused adaptor folders ([#9123](https://github.com/dbt-labs/dbt-core/issues/9123)) -- Move column constraints into common/contracts, removing another dependency of adapters on core. 
([#9024](https://github.com/dbt-labs/dbt-core/issues/9024)) -- Move dbt.semver to dbt.common.semver and update references. ([#9039](https://github.com/dbt-labs/dbt-core/issues/9039)) -- Move lowercase utils method to common ([#9180](https://github.com/dbt-labs/dbt-core/issues/9180)) -- Remove usages of dbt.clients.jinja in dbt/adapters ([#9205](https://github.com/dbt-labs/dbt-core/issues/9205)) -- Remove usage of dbt.contracts in dbt/adapters ([#9208](https://github.com/dbt-labs/dbt-core/issues/9208)) -- Remove usage of dbt.contracts.graph.nodes.ResultNode in dbt/adapters ([#9214](https://github.com/dbt-labs/dbt-core/issues/9214)) -- Introduce RelationConfig Protocol, consolidate Relation.create_from ([#9215](https://github.com/dbt-labs/dbt-core/issues/9215)) -- remove manifest from adapter.set_relations_cache signature ([#9217](https://github.com/dbt-labs/dbt-core/issues/9217)) -- remove manifest from adapter catalog method signatures ([#9218](https://github.com/dbt-labs/dbt-core/issues/9218)) -- Move BaseConfig, Metadata and various other contract classes from model_config to common/contracts/config ([#8919](https://github.com/dbt-labs/dbt-core/issues/8919)) -- Add MacroResolverProtocol, remove lazy loading of manifest in adapter.execute_macro ([#9244](https://github.com/dbt-labs/dbt-core/issues/9244)) -- pass query header context to MacroQueryStringSetter ([#9249](https://github.com/dbt-labs/dbt-core/issues/9249), [#9250](https://github.com/dbt-labs/dbt-core/issues/9250)) -- add macro_context_generator on adapter ([#9247](https://github.com/dbt-labs/dbt-core/issues/9247)) -- pass mp_context to adapter factory as argument instead of import ([#9025](https://github.com/dbt-labs/dbt-core/issues/9025)) -- have dbt-postgres use RelationConfig protocol for materialized views' ([#9292](https://github.com/dbt-labs/dbt-core/issues/9292)) -- move system.py to common as dbt-bigquery relies on it to call gcloud ([#9293](https://github.com/dbt-labs/dbt-core/issues/9293)) -- 
Reorganizing event definitions to define core events in dbt/events rather than dbt/common ([#9152](https://github.com/dbt-labs/dbt-core/issues/9152)) -- move exceptions used only in dbt/common to dbt/common/exceptions ([#9332](https://github.com/dbt-labs/dbt-core/issues/9332)) -- Remove usage of dbt.adapters.factory in dbt/common ([#9334](https://github.com/dbt-labs/dbt-core/issues/9334)) -- Accept valid_error_names in WarnErrorOptions constructor, remove global usage of event modules ([#9337](https://github.com/dbt-labs/dbt-core/issues/9337)) -- Move result objects to dbt.artifacts ([#9193](https://github.com/dbt-labs/dbt-core/issues/9193)) -- dbt Labs OSS standardization of docs and templates. ([#9252](https://github.com/dbt-labs/dbt-core/issues/9252)) -- Add dbt-common as a dependency and remove dbt/common ([#9357](https://github.com/dbt-labs/dbt-core/issues/9357)) -- move cache exceptions to dbt/adapters ([#9362](https://github.com/dbt-labs/dbt-core/issues/9362)) -- Clean up macro contexts. ([#9422](https://github.com/dbt-labs/dbt-core/issues/9422)) -- Add the @requires.manifest decorator to the retry command. 
([#9426](https://github.com/dbt-labs/dbt-core/issues/9426)) -- Move WritableManifest + Documentation to dbt/artifacts ([#9378](https://github.com/dbt-labs/dbt-core/issues/9378), [#9379](https://github.com/dbt-labs/dbt-core/issues/9379)) -- Define Macro and Group resources in dbt/artifacts ([#9381](https://github.com/dbt-labs/dbt-core/issues/9381), [#9382](https://github.com/dbt-labs/dbt-core/issues/9382)) -- Move `SavedQuery` data definition to `dbt/artifacts` ([#9386](https://github.com/dbt-labs/dbt-core/issues/9386)) -- Migrate data parts of `Metric` node to dbt/artifacts ([#9383](https://github.com/dbt-labs/dbt-core/issues/9383)) -- Move data portion of `SemanticModel` to dbt/artifacts ([#9387](https://github.com/dbt-labs/dbt-core/issues/9387)) -- Move data parts of `Exposure` class to dbt/artifacts ([#9380](https://github.com/dbt-labs/dbt-core/issues/9380)) -- Start using `Mergeable` from dbt-common ([#9505](https://github.com/dbt-labs/dbt-core/issues/9505)) -- Move manifest nodes to artifacts ([#9388](https://github.com/dbt-labs/dbt-core/issues/9388)) -- Move data parts of `SourceDefinition` class to dbt/artifacts ([#9384](https://github.com/dbt-labs/dbt-core/issues/9384)) -- Remove uses of Replaceable class ([#7802](https://github.com/dbt-labs/dbt-core/issues/7802)) -- Make dbt-core compatible with Python 3.12 ([#9007](https://github.com/dbt-labs/dbt-core/issues/9007)) -- Restrict protobuf to major version 4. 
([#9566](https://github.com/dbt-labs/dbt-core/issues/9566)) -- Remove references to dbt.tracking and dbt.flags from dbt/artifacts ([#9390](https://github.com/dbt-labs/dbt-core/issues/9390)) -- Implement primary key inference for model nodes ([#9652](https://github.com/dbt-labs/dbt-core/issues/9652)) -- Define UnitTestDefinition resource in dbt/artifacts/resources ([#9667](https://github.com/dbt-labs/dbt-core/issues/9667)) -- Use Manifest instead of WritableManifest in PreviousState and _get_deferred_manifest ([#9567](https://github.com/dbt-labs/dbt-core/issues/9567)) - -### Dependencies - -- Bump actions/checkout from 3 to 4 ([#8781](https://github.com/dbt-labs/dbt-core/pull/8781)) -- Begin using DSI 0.4.x ([#8892](https://github.com/dbt-labs/dbt-core/pull/8892)) -- Update typing-extensions version to >=4.4 ([#9012](https://github.com/dbt-labs/dbt-core/pull/9012)) -- Bump ddtrace from 2.1.7 to 2.3.0 ([#9132](https://github.com/dbt-labs/dbt-core/pull/9132)) -- Bump freezegun from 0.3.12 to 1.3.0 ([#9197](https://github.com/dbt-labs/dbt-core/pull/9197)) -- Bump actions/setup-python from 4 to 5 ([#9267](https://github.com/dbt-labs/dbt-core/pull/9267)) -- Bump actions/download-artifact from 3 to 4 ([#9374](https://github.com/dbt-labs/dbt-core/pull/9374)) -- remove dbt/adapters and add dependency on dbt-adapters ([#9430](https://github.com/dbt-labs/dbt-core/pull/9430)) -- Bump actions/cache from 3 to 4 ([#9471](https://github.com/dbt-labs/dbt-core/pull/9471)) -- Bump peter-evans/create-pull-request from 5 to 6 ([#9552](https://github.com/dbt-labs/dbt-core/pull/9552)) -- Cap dbt-semantic-interfaces version range to <0.6 ([#9671](https://github.com/dbt-labs/dbt-core/pull/9671)) -- bump dbt-common to accept major version 1 ([#9690](https://github.com/dbt-labs/dbt-core/pull/9690)) - -### Security - -- Update Jinja2 to >= 3.1.3 to address CVE-2024-22195 ([#CVE-2024-22195](https://github.com/dbt-labs/dbt-core/pull/CVE-2024-22195)) - -### Contributors -- 
[@LeoTheGriff](https://github.com/LeoTheGriff) ([#9003](https://github.com/dbt-labs/dbt-core/issues/9003)) -- [@WilliamDee](https://github.com/WilliamDee) ([#9203](https://github.com/dbt-labs/dbt-core/issues/9203)) -- [@adamlopez](https://github.com/adamlopez) ([#8621](https://github.com/dbt-labs/dbt-core/issues/8621)) -- [@aliceliu](https://github.com/aliceliu) ([#9652](https://github.com/dbt-labs/dbt-core/issues/9652)) -- [@benmosher](https://github.com/benmosher) ([#n/a](https://github.com/dbt-labs/dbt-core/issues/n/a)) -- [@colin-rorgers-dbt](https://github.com/colin-rorgers-dbt) ([#8919](https://github.com/dbt-labs/dbt-core/issues/8919)) -- [@courtneyholcomb](https://github.com/courtneyholcomb) ([#9507](https://github.com/dbt-labs/dbt-core/issues/9507)) -- [@l1xnan](https://github.com/l1xnan) ([#9007](https://github.com/dbt-labs/dbt-core/issues/9007)) -- [@mederka](https://github.com/mederka) ([#6976](https://github.com/dbt-labs/dbt-core/issues/6976)) -- [@ofek1weiss](https://github.com/ofek1weiss) ([#5609](https://github.com/dbt-labs/dbt-core/issues/5609)) -- [@peterallenwebb,](https://github.com/peterallenwebb,) ([#9112](https://github.com/dbt-labs/dbt-core/issues/9112)) -- [@tlento](https://github.com/tlento) ([#9012](https://github.com/dbt-labs/dbt-core/pull/9012), [#9671](https://github.com/dbt-labs/dbt-core/pull/9671)) -- [@tonayya](https://github.com/tonayya) ([#9252](https://github.com/dbt-labs/dbt-core/issues/9252)) diff --git a/.changes/1.8.0-b2.md b/.changes/1.8.0-b2.md deleted file mode 100644 index e69170a41de..00000000000 --- a/.changes/1.8.0-b2.md +++ /dev/null @@ -1,53 +0,0 @@ -## dbt-core 1.8.0-b2 - April 03, 2024 - -### Features - -- Global config for --target and --profile CLI flags and DBT_TARGET and DBT_PROFILE environment variables. 
([#7798](https://github.com/dbt-labs/dbt-core/issues/7798)) -- Allow excluding resource types for build, list, and clone commands, and provide env vars ([#9237](https://github.com/dbt-labs/dbt-core/issues/9237)) -- SourceDefinition.meta represents source-level and table-level meta properties, instead of only table-level ([#9766](https://github.com/dbt-labs/dbt-core/issues/9766)) -- Allow metrics in semantic layer filters. ([#9804](https://github.com/dbt-labs/dbt-core/issues/9804)) - -### Fixes - -- fix lock-file bad indentation ([#9319](https://github.com/dbt-labs/dbt-core/issues/9319)) -- Tighten exception handling to avoid worker thread hangs. ([#9583](https://github.com/dbt-labs/dbt-core/issues/9583)) -- Do not add duplicate input_measures ([#9360](https://github.com/dbt-labs/dbt-core/issues/9360)) -- Throw a ParsingError if a primary key constraint is defined on multiple columns or at both the column and model level. ([#9581](https://github.com/dbt-labs/dbt-core/issues/9581)) -- Bug fix: don't parse Jinja in filters for input metrics or measures. ([#9582](https://github.com/dbt-labs/dbt-core/issues/9582)) -- Fix traceback parsing for exceptions raised due to csv fixtures moved into or out of fixture/subfolders. 
([#9570](https://github.com/dbt-labs/dbt-core/issues/9570)) -- Fix partial parsing `KeyError` on deleted schema files ([#8860](https://github.com/dbt-labs/dbt-core/issues/8860)) -- Support saved queries in `dbt list` ([#9532](https://github.com/dbt-labs/dbt-core/issues/9532)) -- include sources in catalog.json when over 100 relations selected for catalog generation ([#9755](https://github.com/dbt-labs/dbt-core/issues/9755)) -- Support overriding macros in packages in unit testing ([#9624](https://github.com/dbt-labs/dbt-core/issues/9624)) -- Handle exceptions for failing on-run-* hooks in source freshness ([#9511](https://github.com/dbt-labs/dbt-core/issues/9511)) -- Validation of unit test parsing for incremental models ([#9593](https://github.com/dbt-labs/dbt-core/issues/9593)) -- Fix use of retry command on command using defer ([#9770](https://github.com/dbt-labs/dbt-core/issues/9770)) -- Make `args` variable to be un-modified by `dbt.invoke(args)` ([#8938](https://github.com/dbt-labs/dbt-core/issues/8938), [#9787](https://github.com/dbt-labs/dbt-core/issues/9787)) -- Unit test path outputs ([#9608](https://github.com/dbt-labs/dbt-core/issues/9608)) -- Fix assorted source freshness edgecases so check is run or actionable information is given ([#9078](https://github.com/dbt-labs/dbt-core/issues/9078)) -- "Fix Docker release process to account for both historical and current versions of `dbt-postgres` ([#9827](https://github.com/dbt-labs/dbt-core/issues/9827)) - -### Docs - -- Add analytics for dbt.com ([dbt-docs/#430](https://github.com/dbt-labs/dbt-docs/issues/430)) - -### Under the Hood - -- Remove unused key `wildcard` from MethodName enum ([#9641](https://github.com/dbt-labs/dbt-core/issues/9641)) -- Improve dbt CLI speed ([#4627](https://github.com/dbt-labs/dbt-core/issues/4627)) -- Include node_info in various Result events ([#9619](https://github.com/dbt-labs/dbt-core/issues/9619)) - -### Dependencies - -- Bump actions/upload-artifact from 3 to 4 
([#9470](https://github.com/dbt-labs/dbt-core/pull/9470)) -- Restrict protobuf to 4.* versions ([#9566](https://github.com/dbt-labs/dbt-core/pull/9566)) -- Bump codecov/codecov-action from 3 to 4 ([#9659](https://github.com/dbt-labs/dbt-core/pull/9659)) - -### Contributors -- [@asweet](https://github.com/asweet) ([#9641](https://github.com/dbt-labs/dbt-core/issues/9641)) -- [@b-per](https://github.com/b-per) ([#430](https://github.com/dbt-labs/dbt-core/issues/430)) -- [@barton996](https://github.com/barton996) ([#7798](https://github.com/dbt-labs/dbt-core/issues/7798)) -- [@courtneyholcomb](https://github.com/courtneyholcomb) ([#9804](https://github.com/dbt-labs/dbt-core/issues/9804), [#9582](https://github.com/dbt-labs/dbt-core/issues/9582)) -- [@dwreeves](https://github.com/dwreeves) ([#4627](https://github.com/dbt-labs/dbt-core/issues/4627)) -- [@jx2lee](https://github.com/jx2lee) ([#9319](https://github.com/dbt-labs/dbt-core/issues/9319)) -- [@slothkong](https://github.com/slothkong) ([#9570](https://github.com/dbt-labs/dbt-core/issues/9570)) diff --git a/.changes/1.8.0-b3.md b/.changes/1.8.0-b3.md deleted file mode 100644 index 0b9ce6aaaca..00000000000 --- a/.changes/1.8.0-b3.md +++ /dev/null @@ -1,48 +0,0 @@ -## dbt-core 1.8.0-b3 - April 18, 2024 - -### Features - -- Support scrubbing secret vars ([#7247](https://github.com/dbt-labs/dbt-core/issues/7247)) -- Add wildcard support to the group selector method ([#9811](https://github.com/dbt-labs/dbt-core/issues/9811)) -- source freshness precomputes metadata-based freshness in batch, if possible ([#8705](https://github.com/dbt-labs/dbt-core/issues/8705)) -- Better error message when trying to select a disabled model ([#9747](https://github.com/dbt-labs/dbt-core/issues/9747)) -- Support SQL in unit testing fixtures ([#9405](https://github.com/dbt-labs/dbt-core/issues/9405)) - -### Fixes - -- fix configuration of turning test warnings into failures with WARN_ERROR_OPTIONS 
([#7761](https://github.com/dbt-labs/dbt-core/issues/7761)) -- Fix conflict with newer versions of Snowplow tracker ([#8719](https://github.com/dbt-labs/dbt-core/issues/8719)) -- Only create the packages-install-path / dbt_packages folder during dbt deps ([#6985](https://github.com/dbt-labs/dbt-core/issues/6985), [#9584](https://github.com/dbt-labs/dbt-core/issues/9584)) -- Exclude password-like fields for considering reparse ([#9795](https://github.com/dbt-labs/dbt-core/issues/9795)) -- Fixed query comments test ([#9860](https://github.com/dbt-labs/dbt-core/issues/9860)) -- Begin warning people about spaces in model names ([#9397](https://github.com/dbt-labs/dbt-core/issues/9397)) -- Disambiguiate FreshnessConfigProblem error message ([#9891](https://github.com/dbt-labs/dbt-core/issues/9891)) - -### Under the Hood - -- Remove non dbt.artifacts dbt.* imports from dbt/artifacts ([#9926](https://github.com/dbt-labs/dbt-core/issues/9926)) -- Migrate to using `error_tag` provided by `dbt-common` ([#9914](https://github.com/dbt-labs/dbt-core/issues/9914)) -- Add a test for semantic manifest and move test fixtures needed for it ([#9665](https://github.com/dbt-labs/dbt-core/issues/9665)) - -### Dependencies - -- Relax pathspec upper bound version restriction ([#9373](https://github.com/dbt-labs/dbt-core/issues/9373)) -- Bump python from 3.10.7-slim-nullseye to 3.11.2-slim-bullseye in /docker ([#9687](https://github.com/dbt-labs/dbt-core/issues/9687)) -- Remove duplicate dependency of protobuf in dev-requirements ([#9830](https://github.com/dbt-labs/dbt-core/issues/9830)) -- Bump black from 23.3.0 to >=24.3.0,<25.0 ([#8074](https://github.com/dbt-labs/dbt-core/issues/8074)) - -### Security - -- Bump sqlparse to >=0.5.0, <0.6.0 to address GHSA-2m57-hf25-phgg ([#9951](https://github.com/dbt-labs/dbt-core/issues/9951)) - -### Contributors -- [@SamuelBFavarin](https://github.com/SamuelBFavarin) ([#9747](https://github.com/dbt-labs/dbt-core/issues/9747)) -- 
[@akurdyukov](https://github.com/akurdyukov) ([#8719](https://github.com/dbt-labs/dbt-core/issues/8719)) -- [@damian3031](https://github.com/damian3031) ([#9860](https://github.com/dbt-labs/dbt-core/issues/9860)) -- [@edgarrmondragon](https://github.com/edgarrmondragon) ([#8719](https://github.com/dbt-labs/dbt-core/issues/8719)) -- [@emmoop](https://github.com/emmoop) ([#9951](https://github.com/dbt-labs/dbt-core/issues/9951)) -- [@heysweet](https://github.com/heysweet) ([#9811](https://github.com/dbt-labs/dbt-core/issues/9811)) -- [@jx2lee](https://github.com/jx2lee) ([#7761](https://github.com/dbt-labs/dbt-core/issues/7761)) -- [@nielspardon](https://github.com/nielspardon) ([#7247](https://github.com/dbt-labs/dbt-core/issues/7247)) -- [@niteshy](https://github.com/niteshy) ([#9830](https://github.com/dbt-labs/dbt-core/issues/9830)) -- [@rzjfr](https://github.com/rzjfr) ([#9373](https://github.com/dbt-labs/dbt-core/issues/9373)) diff --git a/.changes/1.8.0/Breaking Changes-20231127-114757.yaml b/.changes/1.8.0/Breaking Changes-20231127-114757.yaml deleted file mode 100644 index b7b8d030d6a..00000000000 --- a/.changes/1.8.0/Breaking Changes-20231127-114757.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Breaking Changes -body: Remove adapter.get_compiler interface -time: 2023-11-27T11:47:57.443202-05:00 -custom: - Author: michelleark - Issue: "9148" diff --git a/.changes/1.8.0/Breaking Changes-20231128-134356.yaml b/.changes/1.8.0/Breaking Changes-20231128-134356.yaml deleted file mode 100644 index b3204c1418b..00000000000 --- a/.changes/1.8.0/Breaking Changes-20231128-134356.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Breaking Changes -body: Move AdapterLogger to adapters folder -time: 2023-11-28T13:43:56.853925-08:00 -custom: - Author: colin-rogers-dbt - Issue: "9151" diff --git a/.changes/1.8.0/Breaking Changes-20231129-091921.yaml b/.changes/1.8.0/Breaking Changes-20231129-091921.yaml deleted file mode 100644 index c58c598a46e..00000000000 --- 
a/.changes/1.8.0/Breaking Changes-20231129-091921.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Breaking Changes -body: Rm --dry-run flag from 'dbt deps --add-package', in favor of just 'dbt deps - --lock' -time: 2023-11-29T09:19:21.071212+01:00 -custom: - Author: jtcohen6 - Issue: "9100" diff --git a/.changes/1.8.0/Breaking Changes-20231130-135348.yaml b/.changes/1.8.0/Breaking Changes-20231130-135348.yaml deleted file mode 100644 index df673d69367..00000000000 --- a/.changes/1.8.0/Breaking Changes-20231130-135348.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Breaking Changes -body: move event manager setup back to core, remove ref to global EVENT_MANAGER and - clean up event manager functions -time: 2023-11-30T13:53:48.645192-08:00 -custom: - Author: colin-rogers-dbt - Issue: "9150" diff --git a/.changes/1.8.0/Breaking Changes-20240130-140550.yaml b/.changes/1.8.0/Breaking Changes-20240130-140550.yaml deleted file mode 100644 index aac49fecb90..00000000000 --- a/.changes/1.8.0/Breaking Changes-20240130-140550.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Breaking Changes -body: Remove dbt-tests-adapter and dbt-postgres packages from dbt-core -time: 2024-01-30T14:05:50.291291-08:00 -custom: - Author: colin-rogers-dbt - Issue: "9455" diff --git a/.changes/1.8.0/Dependencies-20231005-151848.yaml b/.changes/1.8.0/Dependencies-20231005-151848.yaml deleted file mode 100644 index bca488d6485..00000000000 --- a/.changes/1.8.0/Dependencies-20231005-151848.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Bump actions/checkout from 3 to 4" -time: 2023-10-05T15:18:48.00000Z -custom: - Author: dependabot[bot] - Issue: 8781 diff --git a/.changes/1.8.0/Dependencies-20231031-131954.yaml b/.changes/1.8.0/Dependencies-20231031-131954.yaml deleted file mode 100644 index e94d4c2405f..00000000000 --- a/.changes/1.8.0/Dependencies-20231031-131954.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Dependencies -body: Begin using DSI 0.4.x -time: 2023-10-31T13:19:54.750009-07:00 -custom: - 
Author: QMalcolm peterallenwebb - Issue: "8892" diff --git a/.changes/1.8.0/Dependencies-20231106-130051.yaml b/.changes/1.8.0/Dependencies-20231106-130051.yaml deleted file mode 100644 index 6e42d7920b7..00000000000 --- a/.changes/1.8.0/Dependencies-20231106-130051.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Dependencies -body: Update typing-extensions version to >=4.4 -time: 2023-11-06T13:00:51.062386-08:00 -custom: - Author: tlento - Issue: "9012" diff --git a/.changes/1.8.0/Dependencies-20231122-001840.yaml b/.changes/1.8.0/Dependencies-20231122-001840.yaml deleted file mode 100644 index b92795f10d9..00000000000 --- a/.changes/1.8.0/Dependencies-20231122-001840.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Bump ddtrace from 2.1.7 to 2.3.0" -time: 2023-11-22T00:18:40.00000Z -custom: - Author: dependabot[bot] - Issue: 9132 diff --git a/.changes/1.8.0/Dependencies-20231204-000945.yaml b/.changes/1.8.0/Dependencies-20231204-000945.yaml deleted file mode 100644 index a9ff267db30..00000000000 --- a/.changes/1.8.0/Dependencies-20231204-000945.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Bump freezegun from 0.3.12 to 1.3.0" -time: 2023-12-04T00:09:45.00000Z -custom: - Author: dependabot[bot] - Issue: 9197 diff --git a/.changes/1.8.0/Dependencies-20231211-005651.yaml b/.changes/1.8.0/Dependencies-20231211-005651.yaml deleted file mode 100644 index bffcfbb0ade..00000000000 --- a/.changes/1.8.0/Dependencies-20231211-005651.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Bump actions/setup-python from 4 to 5" -time: 2023-12-11T00:56:51.00000Z -custom: - Author: dependabot[bot] - Issue: 9267 diff --git a/.changes/1.8.0/Dependencies-20240115-012030.yaml b/.changes/1.8.0/Dependencies-20240115-012030.yaml deleted file mode 100644 index d1819c8beac..00000000000 --- a/.changes/1.8.0/Dependencies-20240115-012030.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Bump actions/download-artifact from 3 to 4" -time: 
2024-01-15T01:20:30.00000Z -custom: - Author: dependabot[bot] - Issue: 9374 diff --git a/.changes/1.8.0/Dependencies-20240117-100818.yaml b/.changes/1.8.0/Dependencies-20240117-100818.yaml deleted file mode 100644 index f8f1e65b593..00000000000 --- a/.changes/1.8.0/Dependencies-20240117-100818.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Dependencies -body: Relax pathspec upper bound version restriction -time: 2024-01-17T10:08:18.009949641+01:00 -custom: - Author: rzjfr - Issue: "9373" diff --git a/.changes/1.8.0/Dependencies-20240123-105843.yaml b/.changes/1.8.0/Dependencies-20240123-105843.yaml deleted file mode 100644 index 4c2995c5ce6..00000000000 --- a/.changes/1.8.0/Dependencies-20240123-105843.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Dependencies -body: remove dbt/adapters and add dependency on dbt-adapters -time: 2024-01-23T10:58:43.286952-08:00 -custom: - Author: colin-rogers-dbt - Issue: "9430" diff --git a/.changes/1.8.0/Dependencies-20240129-005734.yaml b/.changes/1.8.0/Dependencies-20240129-005734.yaml deleted file mode 100644 index 90a5ee0c0c3..00000000000 --- a/.changes/1.8.0/Dependencies-20240129-005734.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Bump actions/upload-artifact from 3 to 4" -time: 2024-01-29T00:57:34.00000Z -custom: - Author: dependabot[bot] - Issue: 9470 diff --git a/.changes/1.8.0/Dependencies-20240129-005743.yaml b/.changes/1.8.0/Dependencies-20240129-005743.yaml deleted file mode 100644 index 6e491069322..00000000000 --- a/.changes/1.8.0/Dependencies-20240129-005743.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Bump actions/cache from 3 to 4" -time: 2024-01-29T00:57:43.00000Z -custom: - Author: dependabot[bot] - Issue: 9471 diff --git a/.changes/1.8.0/Dependencies-20240212-011324.yaml b/.changes/1.8.0/Dependencies-20240212-011324.yaml deleted file mode 100644 index 200932ca249..00000000000 --- a/.changes/1.8.0/Dependencies-20240212-011324.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" 
-body: "Bump peter-evans/create-pull-request from 5 to 6" -time: 2024-02-12T01:13:24.00000Z -custom: - Author: dependabot[bot] - Issue: 9552 diff --git a/.changes/1.8.0/Dependencies-20240222-102947.yaml b/.changes/1.8.0/Dependencies-20240222-102947.yaml deleted file mode 100644 index 78c7a994b35..00000000000 --- a/.changes/1.8.0/Dependencies-20240222-102947.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Dependencies -body: Restrict protobuf to 4.* versions -time: 2024-02-22T10:29:47.595435-08:00 -custom: - Author: QMalcolm - Issue: "9566" diff --git a/.changes/1.8.0/Dependencies-20240226-004412.yaml b/.changes/1.8.0/Dependencies-20240226-004412.yaml deleted file mode 100644 index 8300a32e086..00000000000 --- a/.changes/1.8.0/Dependencies-20240226-004412.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Bump codecov/codecov-action from 3 to 4" -time: 2024-02-26T00:44:12.00000Z -custom: - Author: dependabot[bot] - Issue: 9659 diff --git a/.changes/1.8.0/Dependencies-20240226-123502.yaml b/.changes/1.8.0/Dependencies-20240226-123502.yaml deleted file mode 100644 index f1ad49c393d..00000000000 --- a/.changes/1.8.0/Dependencies-20240226-123502.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Dependencies -body: Cap dbt-semantic-interfaces version range to <0.6 -time: 2024-02-26T12:35:02.643779-08:00 -custom: - Author: tlento - Issue: "9671" diff --git a/.changes/1.8.0/Dependencies-20240227-142138.yaml b/.changes/1.8.0/Dependencies-20240227-142138.yaml deleted file mode 100644 index 0b20244d773..00000000000 --- a/.changes/1.8.0/Dependencies-20240227-142138.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Dependencies -body: Bump python from 3.10.7-slim-nullseye to 3.11.2-slim-bullseye in /docker -time: 2024-02-27T14:21:38.394757-05:00 -custom: - Author: michelleark - Issue: "9687" diff --git a/.changes/1.8.0/Dependencies-20240227-151115.yaml b/.changes/1.8.0/Dependencies-20240227-151115.yaml deleted file mode 100644 index d9a99d7e3dd..00000000000 --- 
a/.changes/1.8.0/Dependencies-20240227-151115.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Dependencies -body: bump dbt-common to accept major version 1 -time: 2024-02-27T15:11:15.583604-05:00 -custom: - Author: michelleark - Issue: "9690" diff --git a/.changes/1.8.0/Dependencies-20240331-103917.yaml b/.changes/1.8.0/Dependencies-20240331-103917.yaml deleted file mode 100644 index c4cb75dd449..00000000000 --- a/.changes/1.8.0/Dependencies-20240331-103917.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Dependencies -body: Remove duplicate dependency of protobuf in dev-requirements -time: 2024-03-31T10:39:17.432017-07:00 -custom: - Author: niteshy - Issue: "9830" diff --git a/.changes/1.8.0/Dependencies-20240410-183321.yaml b/.changes/1.8.0/Dependencies-20240410-183321.yaml deleted file mode 100644 index 7fb86e98c3b..00000000000 --- a/.changes/1.8.0/Dependencies-20240410-183321.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Bump black from 23.3.0 to >=24.3.0,<25.0" -time: 2024-04-10T18:33:21.00000Z -custom: - Author: dependabot[bot] - Issue: 8074 diff --git a/.changes/1.8.0/Docs-20230615-105157.yaml b/.changes/1.8.0/Docs-20230615-105157.yaml deleted file mode 100644 index 327e33b2e03..00000000000 --- a/.changes/1.8.0/Docs-20230615-105157.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Docs -body: Add analytics for dbt.com -time: 2023-06-15T10:51:57.838991+02:00 -custom: - Author: b-per - Issue: "430" diff --git a/.changes/1.8.0/Docs-20231106-123157.yaml b/.changes/1.8.0/Docs-20231106-123157.yaml deleted file mode 100644 index 28b4cd8e755..00000000000 --- a/.changes/1.8.0/Docs-20231106-123157.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Docs -body: fix get_custom_database docstring -time: 2023-11-06T12:31:57.525711Z -custom: - Author: LeoTheGriff - Issue: "9003" diff --git a/.changes/1.8.0/Features-20230802-145011.yaml b/.changes/1.8.0/Features-20230802-145011.yaml deleted file mode 100644 index f9c742824c7..00000000000 --- 
a/.changes/1.8.0/Features-20230802-145011.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Initial implementation of unit testing -time: 2023-08-02T14:50:11.391992-04:00 -custom: - Author: gshank - Issue: "8287" diff --git a/.changes/1.8.0/Features-20230828-101825.yaml b/.changes/1.8.0/Features-20230828-101825.yaml deleted file mode 100644 index 13101ecbacd..00000000000 --- a/.changes/1.8.0/Features-20230828-101825.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Unit test manifest artifacts and selection -time: 2023-08-28T10:18:25.958929-04:00 -custom: - Author: gshank - Issue: "8295" diff --git a/.changes/1.8.0/Features-20230906-234741.yaml b/.changes/1.8.0/Features-20230906-234741.yaml deleted file mode 100644 index ca94f1fc6c5..00000000000 --- a/.changes/1.8.0/Features-20230906-234741.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Support config with tags & meta for unit tests -time: 2023-09-06T23:47:41.059915-04:00 -custom: - Author: michelleark - Issue: "8294" diff --git a/.changes/1.8.0/Features-20230915-123733.yaml b/.changes/1.8.0/Features-20230915-123733.yaml deleted file mode 100644 index 146ad8ef89a..00000000000 --- a/.changes/1.8.0/Features-20230915-123733.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: 'Allow adapters to include package logs in dbt standard logging ' -time: 2023-09-15T12:37:33.862862-07:00 -custom: - Author: colin-rogers-dbt - Issue: "7859" diff --git a/.changes/1.8.0/Features-20230928-163205.yaml b/.changes/1.8.0/Features-20230928-163205.yaml deleted file mode 100644 index 7f9b7c047ac..00000000000 --- a/.changes/1.8.0/Features-20230928-163205.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Enable inline csv fixtures in unit tests -time: 2023-09-28T16:32:05.573776-04:00 -custom: - Author: gshank - Issue: "8626" diff --git a/.changes/1.8.0/Features-20231017-143620.yaml b/.changes/1.8.0/Features-20231017-143620.yaml deleted file mode 100644 index dfdd2b6f4b2..00000000000 --- 
a/.changes/1.8.0/Features-20231017-143620.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Add drop_schema_named macro -time: 2023-10-17T14:36:20.612289-07:00 -custom: - Author: colin-rogers-dbt - Issue: "8025" diff --git a/.changes/1.8.0/Features-20231026-110821.yaml b/.changes/1.8.0/Features-20231026-110821.yaml deleted file mode 100644 index 7fffb6fe569..00000000000 --- a/.changes/1.8.0/Features-20231026-110821.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: migrate utils to common and adapters folders -time: 2023-10-26T11:08:21.458709-07:00 -custom: - Author: colin-rogers-dbt - Issue: "8924" diff --git a/.changes/1.8.0/Features-20231026-123556.yaml b/.changes/1.8.0/Features-20231026-123556.yaml deleted file mode 100644 index cf37554c08d..00000000000 --- a/.changes/1.8.0/Features-20231026-123556.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Move Agate helper client into common -time: 2023-10-26T12:35:56.538587-07:00 -custom: - Author: MichelleArk - Issue: "8926" diff --git a/.changes/1.8.0/Features-20231026-123913.yaml b/.changes/1.8.0/Features-20231026-123913.yaml deleted file mode 100644 index d3ada7cb691..00000000000 --- a/.changes/1.8.0/Features-20231026-123913.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: remove usage of dbt.config.PartialProject from dbt/adapters -time: 2023-10-26T12:39:13.904116-07:00 -custom: - Author: MichelleArk - Issue: "8928" diff --git a/.changes/1.8.0/Features-20231031-132022.yaml b/.changes/1.8.0/Features-20231031-132022.yaml deleted file mode 100644 index 363f3405399..00000000000 --- a/.changes/1.8.0/Features-20231031-132022.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Add exports to SavedQuery spec -time: 2023-10-31T13:20:22.448158-07:00 -custom: - Author: QMalcolm peterallenwebb - Issue: "8892" diff --git a/.changes/1.8.0/Features-20231101-101845.yaml b/.changes/1.8.0/Features-20231101-101845.yaml deleted file mode 100644 index 603990ce2e7..00000000000 --- 
a/.changes/1.8.0/Features-20231101-101845.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Support unit testing incremental models -time: 2023-11-01T10:18:45.341781-04:00 -custom: - Author: michelleark - Issue: "8422" diff --git a/.changes/1.8.0/Features-20231106-194752.yaml b/.changes/1.8.0/Features-20231106-194752.yaml deleted file mode 100644 index 2ea6553d339..00000000000 --- a/.changes/1.8.0/Features-20231106-194752.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Add support of csv file fixtures to unit testing -time: 2023-11-06T19:47:52.501495-06:00 -custom: - Author: emmyoop - Issue: "8290" diff --git a/.changes/1.8.0/Features-20231107-135635.yaml b/.changes/1.8.0/Features-20231107-135635.yaml deleted file mode 100644 index 711ba4ce102..00000000000 --- a/.changes/1.8.0/Features-20231107-135635.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Remove legacy logger -time: 2023-11-07T13:56:35.186648-08:00 -custom: - Author: colin-rogers-dbt - Issue: "8027" diff --git a/.changes/1.8.0/Features-20231107-231006.yaml b/.changes/1.8.0/Features-20231107-231006.yaml deleted file mode 100644 index 0865c72cc58..00000000000 --- a/.changes/1.8.0/Features-20231107-231006.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Unit tests support --defer and state:modified -time: 2023-11-07T23:10:06.376588-05:00 -custom: - Author: jtcohen6 - Issue: "8517" diff --git a/.changes/1.8.0/Features-20231110-154255.yaml b/.changes/1.8.0/Features-20231110-154255.yaml deleted file mode 100644 index 77283846646..00000000000 --- a/.changes/1.8.0/Features-20231110-154255.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Support setting export configs hierarchically via saved query and project configs -time: 2023-11-10T15:42:55.042317-08:00 -custom: - Author: QMalcolm - Issue: "8956" diff --git a/.changes/1.8.0/Features-20231111-191150.yaml b/.changes/1.8.0/Features-20231111-191150.yaml deleted file mode 100644 index 4afff04dbff..00000000000 --- 
a/.changes/1.8.0/Features-20231111-191150.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Support source inputs in unit tests -time: 2023-11-11T19:11:50.870494-05:00 -custom: - Author: gshank - Issue: "8507" diff --git a/.changes/1.8.0/Features-20231114-101555.yaml b/.changes/1.8.0/Features-20231114-101555.yaml deleted file mode 100644 index 2ed80f9bee7..00000000000 --- a/.changes/1.8.0/Features-20231114-101555.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Use daff to render diff displayed in stdout when unit test fails -time: 2023-11-14T10:15:55.689307-05:00 -custom: - Author: michelleark - Issue: "8558" diff --git a/.changes/1.8.0/Features-20231115-092005.yaml b/.changes/1.8.0/Features-20231115-092005.yaml deleted file mode 100644 index 6f156764aff..00000000000 --- a/.changes/1.8.0/Features-20231115-092005.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Global config for --target and --profile CLI flags and DBT_TARGET and DBT_PROFILE environment variables. 
-time: 2023-11-15T09:20:05.12461Z -custom: - Author: barton996 - Issue: "7798" diff --git a/.changes/1.8.0/Features-20231116-144006.yaml b/.changes/1.8.0/Features-20231116-144006.yaml deleted file mode 100644 index b70e89e76ec..00000000000 --- a/.changes/1.8.0/Features-20231116-144006.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Move unit testing to test command -time: 2023-11-16T14:40:06.121336-05:00 -custom: - Author: gshank - Issue: "8979" diff --git a/.changes/1.8.0/Features-20231116-234049.yaml b/.changes/1.8.0/Features-20231116-234049.yaml deleted file mode 100644 index 786c15311a4..00000000000 --- a/.changes/1.8.0/Features-20231116-234049.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Support --empty flag for schema-only dry runs -time: 2023-11-16T23:40:49.96651-05:00 -custom: - Author: michelleark - Issue: "8971" diff --git a/.changes/1.8.0/Features-20231130-130948.yaml b/.changes/1.8.0/Features-20231130-130948.yaml deleted file mode 100644 index 7144deb8112..00000000000 --- a/.changes/1.8.0/Features-20231130-130948.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Support unit tests in non-root packages -time: 2023-11-30T13:09:48.206007-05:00 -custom: - Author: gshank - Issue: "8285" diff --git a/.changes/1.8.0/Features-20231205-131717.yaml b/.changes/1.8.0/Features-20231205-131717.yaml deleted file mode 100644 index c4a35391ee1..00000000000 --- a/.changes/1.8.0/Features-20231205-131717.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Features -body: Convert the `tests` config to `data_tests` in both dbt_project.yml and schema files. - in schema files. 
-time: 2023-12-05T13:17:17.647765-06:00 -custom: - Author: emmyoop - Issue: "8699" diff --git a/.changes/1.8.0/Features-20231205-200447.yaml b/.changes/1.8.0/Features-20231205-200447.yaml deleted file mode 100644 index 6af669a81e3..00000000000 --- a/.changes/1.8.0/Features-20231205-200447.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Make fixture files full-fledged parts of the manifest and enable partial parsing -time: 2023-12-05T20:04:47.117029-05:00 -custom: - Author: gshank - Issue: "9067" diff --git a/.changes/1.8.0/Features-20231206-181458.yaml b/.changes/1.8.0/Features-20231206-181458.yaml deleted file mode 100644 index f14c1af4cbd..00000000000 --- a/.changes/1.8.0/Features-20231206-181458.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Features -body: Adds support for parsing conversion metric related properties for the semantic - layer. -time: 2023-12-06T18:14:58.688221-05:00 -custom: - Author: WilliamDee - Issue: "9203" diff --git a/.changes/1.8.0/Features-20231212-150556.yaml b/.changes/1.8.0/Features-20231212-150556.yaml deleted file mode 100644 index 299333481df..00000000000 --- a/.changes/1.8.0/Features-20231212-150556.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: In build command run unit tests before models -time: 2023-12-12T15:05:56.778829-05:00 -custom: - Author: gshank - Issue: "9128" diff --git a/.changes/1.8.0/Features-20231218-195854.yaml b/.changes/1.8.0/Features-20231218-195854.yaml deleted file mode 100644 index 2a78826aff0..00000000000 --- a/.changes/1.8.0/Features-20231218-195854.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Move flags from UserConfig in profiles.yml to flags in dbt_project.yml -time: 2023-12-18T19:58:54.075811-05:00 -custom: - Author: gshank - Issue: "9183" diff --git a/.changes/1.8.0/Features-20231231-171205.yaml b/.changes/1.8.0/Features-20231231-171205.yaml deleted file mode 100644 index 08f5ebe5aad..00000000000 --- a/.changes/1.8.0/Features-20231231-171205.yaml +++ /dev/null @@ -1,6 +0,0 
@@ -kind: Features -body: Added hook support for `dbt source freshness` -time: 2023-12-31T17:12:05.587185+02:00 -custom: - Author: ofek1weiss - Issue: "5609" diff --git a/.changes/1.8.0/Features-20240118-135651.yaml b/.changes/1.8.0/Features-20240118-135651.yaml deleted file mode 100644 index 33892b22d90..00000000000 --- a/.changes/1.8.0/Features-20240118-135651.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Align with order of unit test output when `actual` differs from `expected` -time: 2024-01-18T13:56:51.131001-07:00 -custom: - Author: dbeatty10 - Issue: "9370" diff --git a/.changes/1.8.0/Features-20240119-101335.yaml b/.changes/1.8.0/Features-20240119-101335.yaml deleted file mode 100644 index 0dcc711797c..00000000000 --- a/.changes/1.8.0/Features-20240119-101335.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Added support for external nodes in unit test nodes -time: 2024-01-19T10:13:35.589099-06:00 -custom: - Author: QMalcolm MichelleArk - Issue: "8944" diff --git a/.changes/1.8.0/Features-20240122-145854.yaml b/.changes/1.8.0/Features-20240122-145854.yaml deleted file mode 100644 index db3c49d8a2a..00000000000 --- a/.changes/1.8.0/Features-20240122-145854.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Enable unit testing versioned models -time: 2024-01-22T14:58:54.251484-05:00 -custom: - Author: gshank - Issue: "9344" diff --git a/.changes/1.8.0/Features-20240129-114753.yaml b/.changes/1.8.0/Features-20240129-114753.yaml deleted file mode 100644 index 2acd2d57040..00000000000 --- a/.changes/1.8.0/Features-20240129-114753.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Enable list command for unit tests -time: 2024-01-29T11:47:53.696961-05:00 -custom: - Author: gshank - Issue: "8508" diff --git a/.changes/1.8.0/Features-20240131-153535.yaml b/.changes/1.8.0/Features-20240131-153535.yaml deleted file mode 100644 index 9be4d39694b..00000000000 --- a/.changes/1.8.0/Features-20240131-153535.yaml +++ /dev/null @@ -1,6 
+0,0 @@ -kind: Features -body: Integration Test Optimizations -time: 2024-01-31T15:35:35.691224-05:00 -custom: - Author: peterallenwebb - Issue: "9498" diff --git a/.changes/1.8.0/Features-20240201-154956.yaml b/.changes/1.8.0/Features-20240201-154956.yaml deleted file mode 100644 index b87d3009e85..00000000000 --- a/.changes/1.8.0/Features-20240201-154956.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Accelerate integration tests with caching. -time: 2024-02-01T15:49:56.422651-05:00 -custom: - Author: peterallenwebb - Issue: "9498" diff --git a/.changes/1.8.0/Features-20240202-112644.yaml b/.changes/1.8.0/Features-20240202-112644.yaml deleted file mode 100644 index 95fc50e8163..00000000000 --- a/.changes/1.8.0/Features-20240202-112644.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Cache environment variables -time: 2024-02-02T11:26:44.614393-05:00 -custom: - Author: peterallenwebb - Issue: "9489" diff --git a/.changes/1.8.0/Features-20240215-120811.yaml b/.changes/1.8.0/Features-20240215-120811.yaml deleted file mode 100644 index 14ce1aee016..00000000000 --- a/.changes/1.8.0/Features-20240215-120811.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Support meta at the config level for Metric nodes -time: 2024-02-15T12:08:11.927789-06:00 -custom: - Author: emmyoop - Issue: "9441" diff --git a/.changes/1.8.0/Features-20240215-145814.yaml b/.changes/1.8.0/Features-20240215-145814.yaml deleted file mode 100644 index 644852ca0e8..00000000000 --- a/.changes/1.8.0/Features-20240215-145814.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Add cache to SavedQuery config -time: 2024-02-15T14:58:14.834651-06:00 -custom: - Author: emmyoop - Issue: "9540" diff --git a/.changes/1.8.0/Features-20240307-153622.yaml b/.changes/1.8.0/Features-20240307-153622.yaml deleted file mode 100644 index 80886a82c9b..00000000000 --- a/.changes/1.8.0/Features-20240307-153622.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Support scrubbing secret 
vars -time: 2024-03-07T15:36:22.754627+01:00 -custom: - Author: nielspardon - Issue: "7247" diff --git a/.changes/1.8.0/Features-20240312-140407.yaml b/.changes/1.8.0/Features-20240312-140407.yaml deleted file mode 100644 index a73c3bc1c85..00000000000 --- a/.changes/1.8.0/Features-20240312-140407.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Allow excluding resource types for build, list, and clone commands, and provide env vars -time: 2024-03-12T14:04:07.086017-04:00 -custom: - Author: gshank - Issue: "9237" diff --git a/.changes/1.8.0/Features-20240315-161209.yaml b/.changes/1.8.0/Features-20240315-161209.yaml deleted file mode 100644 index 4a428b973db..00000000000 --- a/.changes/1.8.0/Features-20240315-161209.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Features -body: SourceDefinition.meta represents source-level and table-level meta properties, - instead of only table-level -time: 2024-03-15T16:12:09.789935-04:00 -custom: - Author: michelleark - Issue: "9766" diff --git a/.changes/1.8.0/Features-20240322-103124.yaml b/.changes/1.8.0/Features-20240322-103124.yaml deleted file mode 100644 index 735dd44bdbc..00000000000 --- a/.changes/1.8.0/Features-20240322-103124.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Allow metrics in semantic layer filters. 
-time: 2024-03-22T10:31:24.76978-07:00 -custom: - Author: courtneyholcomb - Issue: "9804" diff --git a/.changes/1.8.0/Features-20240323-201230.yaml b/.changes/1.8.0/Features-20240323-201230.yaml deleted file mode 100644 index 3f981ecc7b3..00000000000 --- a/.changes/1.8.0/Features-20240323-201230.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Add wildcard support to the group selector method -time: 2024-03-23T20:12:30.715975-04:00 -custom: - Author: heysweet - Issue: "9811" diff --git a/.changes/1.8.0/Features-20240404-170728.yaml b/.changes/1.8.0/Features-20240404-170728.yaml deleted file mode 100644 index 6db7735acbc..00000000000 --- a/.changes/1.8.0/Features-20240404-170728.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: 'source freshness precomputes metadata-based freshness in batch, if possible ' -time: 2024-04-04T17:07:28.717868-07:00 -custom: - Author: michelleark - Issue: "8705" diff --git a/.changes/1.8.0/Features-20240405-175733.yaml b/.changes/1.8.0/Features-20240405-175733.yaml deleted file mode 100644 index 0346361fc15..00000000000 --- a/.changes/1.8.0/Features-20240405-175733.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Better error message when trying to select a disabled model -time: 2024-04-05T17:57:33.047963+02:00 -custom: - Author: SamuelBFavarin - Issue: "9747" diff --git a/.changes/1.8.0/Features-20240408-094132.yaml b/.changes/1.8.0/Features-20240408-094132.yaml deleted file mode 100644 index 0b7a251e926..00000000000 --- a/.changes/1.8.0/Features-20240408-094132.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Support SQL in unit testing fixtures -time: 2024-04-08T09:41:32.15936-04:00 -custom: - Author: gshank - Issue: "9405" diff --git a/.changes/1.8.0/Fixes-20231013-130943.yaml b/.changes/1.8.0/Fixes-20231013-130943.yaml deleted file mode 100644 index db89350ac9f..00000000000 --- a/.changes/1.8.0/Fixes-20231013-130943.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: For packages installed with 
tarball method, fetch metadata to resolve nested dependencies -time: 2023-10-13T13:09:43.188308-04:00 -custom: - Author: adamlopez - Issue: "8621" diff --git a/.changes/1.8.0/Fixes-20231016-163953.yaml b/.changes/1.8.0/Fixes-20231016-163953.yaml deleted file mode 100644 index ea21584bee1..00000000000 --- a/.changes/1.8.0/Fixes-20231016-163953.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fix partial parsing not working for semantic model change -time: 2023-10-16T16:39:53.05058-07:00 -custom: - Author: ChenyuLInx - Issue: "8859" diff --git a/.changes/1.8.0/Fixes-20231024-110151.yaml b/.changes/1.8.0/Fixes-20231024-110151.yaml deleted file mode 100644 index 711f431be8a..00000000000 --- a/.changes/1.8.0/Fixes-20231024-110151.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Handle unknown `type_code` for model contracts -time: 2023-10-24T11:01:51.980781-06:00 -custom: - Author: dbeatty10 - Issue: 8877 8353 diff --git a/.changes/1.8.0/Fixes-20231024-145504.yaml b/.changes/1.8.0/Fixes-20231024-145504.yaml deleted file mode 100644 index b7dac02c150..00000000000 --- a/.changes/1.8.0/Fixes-20231024-145504.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Add back contract enforcement for temporary tables on postgres -time: 2023-10-24T14:55:04.051683-05:00 -custom: - Author: emmyoop - Issue: "8857" diff --git a/.changes/1.8.0/Fixes-20231024-155400.yaml b/.changes/1.8.0/Fixes-20231024-155400.yaml deleted file mode 100644 index cd10f06d005..00000000000 --- a/.changes/1.8.0/Fixes-20231024-155400.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Rework get_catalog implementation to retain previous adapter interface semantics -time: 2023-10-24T15:54:00.628086-04:00 -custom: - Author: peterallenwebb - Issue: "8846" diff --git a/.changes/1.8.0/Fixes-20231026-002536.yaml b/.changes/1.8.0/Fixes-20231026-002536.yaml deleted file mode 100644 index f14c9ec0e0b..00000000000 --- a/.changes/1.8.0/Fixes-20231026-002536.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes 
-body: Add version to fqn when version==0 -time: 2023-10-26T00:25:36.259356-05:00 -custom: - Author: aranke - Issue: "8836" diff --git a/.changes/1.8.0/Fixes-20231030-093734.yaml b/.changes/1.8.0/Fixes-20231030-093734.yaml deleted file mode 100644 index 7322dd5042b..00000000000 --- a/.changes/1.8.0/Fixes-20231030-093734.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fix cased comparison in catalog-retrieval function. -time: 2023-10-30T09:37:34.258612-04:00 -custom: - Author: peterallenwebb - Issue: "8939" diff --git a/.changes/1.8.0/Fixes-20231031-005345.yaml b/.changes/1.8.0/Fixes-20231031-005345.yaml deleted file mode 100644 index 56afd9f324e..00000000000 --- a/.changes/1.8.0/Fixes-20231031-005345.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Catalog queries now assign the correct type to materialized views -time: 2023-10-31T00:53:45.486203-04:00 -custom: - Author: mikealfare - Issue: "8864" diff --git a/.changes/1.8.0/Fixes-20231031-144837.yaml b/.changes/1.8.0/Fixes-20231031-144837.yaml deleted file mode 100644 index 64b15e29dc9..00000000000 --- a/.changes/1.8.0/Fixes-20231031-144837.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fix compilation exception running empty seed file and support new Integer agate data_type -time: 2023-10-31T14:48:37.774871-04:00 -custom: - Author: gshank - Issue: "8895" diff --git a/.changes/1.8.0/Fixes-20231101-155824.yaml b/.changes/1.8.0/Fixes-20231101-155824.yaml deleted file mode 100644 index dba6679a53e..00000000000 --- a/.changes/1.8.0/Fixes-20231101-155824.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Make relation filtering None-tolerant for maximal flexibility across adapters. 
-time: 2023-11-01T15:58:24.552054-04:00 -custom: - Author: peterallenwebb - Issue: "8974" diff --git a/.changes/1.8.0/Fixes-20231106-155933.yaml b/.changes/1.8.0/Fixes-20231106-155933.yaml deleted file mode 100644 index 9e4201e4962..00000000000 --- a/.changes/1.8.0/Fixes-20231106-155933.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Fixes -body: Update run_results.json from previous versions of dbt to support deferral and - rerun from failure -time: 2023-11-06T15:59:33.677915-05:00 -custom: - Author: jtcohen6 peterallenwebb - Issue: "9010" diff --git a/.changes/1.8.0/Fixes-20231107-092358.yaml b/.changes/1.8.0/Fixes-20231107-092358.yaml deleted file mode 100644 index 24b947a993f..00000000000 --- a/.changes/1.8.0/Fixes-20231107-092358.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fix git repository with subdirectory for Deps -time: 2023-11-07T09:23:58.214271-08:00 -custom: - Author: ChenyuLInx - Issue: "9000" diff --git a/.changes/1.8.0/Fixes-20231107-094130.yaml b/.changes/1.8.0/Fixes-20231107-094130.yaml deleted file mode 100644 index c32d62de52f..00000000000 --- a/.changes/1.8.0/Fixes-20231107-094130.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Fixes -body: Use MANIFEST.in to recursively include all jinja templates; fixes issue where - some templates were not included in the distribution -time: 2023-11-07T09:41:30.121733-05:00 -custom: - Author: mikealfare - Issue: "9016" diff --git a/.changes/1.8.0/Fixes-20231113-114956.yaml b/.changes/1.8.0/Fixes-20231113-114956.yaml deleted file mode 100644 index 77bb210002c..00000000000 --- a/.changes/1.8.0/Fixes-20231113-114956.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fix formatting of tarball information in packages-lock.yml -time: 2023-11-13T11:49:56.437007-08:00 -custom: - Author: ChenyuLInx QMalcolm - Issue: "9062" diff --git a/.changes/1.8.0/Fixes-20231113-154535.yaml b/.changes/1.8.0/Fixes-20231113-154535.yaml deleted file mode 100644 index f352830921f..00000000000 --- 
a/.changes/1.8.0/Fixes-20231113-154535.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Use seed file from disk for unit testing if rows not specified in YAML config -time: 2023-11-13T15:45:35.008565Z -custom: - Author: aranke - Issue: "8652" diff --git a/.changes/1.8.0/Fixes-20231127-154310.yaml b/.changes/1.8.0/Fixes-20231127-154310.yaml deleted file mode 100644 index 57dde2cb9fd..00000000000 --- a/.changes/1.8.0/Fixes-20231127-154310.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: 'deps: Lock git packages to commit SHA during resolution' -time: 2023-11-27T15:43:10.122069+01:00 -custom: - Author: jtcohen6 - Issue: "9050" diff --git a/.changes/1.8.0/Fixes-20231127-154347.yaml b/.changes/1.8.0/Fixes-20231127-154347.yaml deleted file mode 100644 index 9b2078b1c52..00000000000 --- a/.changes/1.8.0/Fixes-20231127-154347.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: 'deps: Use PackageRenderer to read package-lock.json' -time: 2023-11-27T15:43:47.842423+01:00 -custom: - Author: jtcohen6 - Issue: "9127" diff --git a/.changes/1.8.0/Fixes-20231127-165244.yaml b/.changes/1.8.0/Fixes-20231127-165244.yaml deleted file mode 100644 index 87147eb6305..00000000000 --- a/.changes/1.8.0/Fixes-20231127-165244.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Fixes -body: Ensure we produce valid jsonschema schemas for manifest, catalog, run-results, - and sources -time: 2023-11-27T16:52:44.590313-08:00 -custom: - Author: QMalcolm - Issue: "8991" diff --git a/.changes/1.8.0/Fixes-20231128-102111.yaml b/.changes/1.8.0/Fixes-20231128-102111.yaml deleted file mode 100644 index c3371708a11..00000000000 --- a/.changes/1.8.0/Fixes-20231128-102111.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fix parsing f-strings in python models -time: 2023-11-28T10:21:11.596121-08:00 -custom: - Author: mederka - Issue: "6976" diff --git a/.changes/1.8.0/Fixes-20231128-155225.yaml b/.changes/1.8.0/Fixes-20231128-155225.yaml deleted file mode 100644 index 4b24cc37c25..00000000000 --- 
a/.changes/1.8.0/Fixes-20231128-155225.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: 'Get sources working again in dbt docs generate' -time: 2023-11-28T15:52:25.738256Z -custom: - Author: aranke - Issue: "9119" diff --git a/.changes/1.8.0/Fixes-20231213-220449.yaml b/.changes/1.8.0/Fixes-20231213-220449.yaml deleted file mode 100644 index 6da9f7ddcaa..00000000000 --- a/.changes/1.8.0/Fixes-20231213-220449.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Preserve the value of vars and the --full-refresh flags when using retry. -time: 2023-12-13T22:04:49.228294-05:00 -custom: - Author: peterallenwebb, ChenyuLInx - Issue: "9112" diff --git a/.changes/1.8.0/Fixes-20240106-003649.yaml b/.changes/1.8.0/Fixes-20240106-003649.yaml deleted file mode 100644 index b41086fb4d7..00000000000 --- a/.changes/1.8.0/Fixes-20240106-003649.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: fix lock-file bad indentation -time: 2024-01-06T00:36:49.547533+09:00 -custom: - Author: jx2lee - Issue: "9319" diff --git a/.changes/1.8.0/Fixes-20240108-232035.yaml b/.changes/1.8.0/Fixes-20240108-232035.yaml deleted file mode 100644 index 227332f7af1..00000000000 --- a/.changes/1.8.0/Fixes-20240108-232035.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: fix configuration of turning test warnings into failures with WARN_ERROR_OPTIONS -time: 2024-01-08T23:20:35.339102+09:00 -custom: - Author: jx2lee - Issue: "7761" diff --git a/.changes/1.8.0/Fixes-20240115-165310.yaml b/.changes/1.8.0/Fixes-20240115-165310.yaml deleted file mode 100644 index d05064bba61..00000000000 --- a/.changes/1.8.0/Fixes-20240115-165310.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Support reasonably long unit test names -time: 2024-01-15T16:53:10.42761-05:00 -custom: - Author: michelleark - Issue: "9015" diff --git a/.changes/1.8.0/Fixes-20240119-215214.yaml b/.changes/1.8.0/Fixes-20240119-215214.yaml deleted file mode 100644 index 90fc309c667..00000000000 --- 
a/.changes/1.8.0/Fixes-20240119-215214.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Fixes -body: Fix back-compat parsing for model-level 'tests', source table-level 'tests', - and 'tests' defined on model versions -time: 2024-01-19T21:52:14.090462+01:00 -custom: - Author: jtcohen6 - Issue: "9411" diff --git a/.changes/1.8.0/Fixes-20240124-142522.yaml b/.changes/1.8.0/Fixes-20240124-142522.yaml deleted file mode 100644 index 40a8b1a6aea..00000000000 --- a/.changes/1.8.0/Fixes-20240124-142522.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fix retry command run from CLI -time: 2024-01-24T14:25:22.846199-08:00 -custom: - Author: ChenyuLInx - Issue: "9444" diff --git a/.changes/1.8.0/Fixes-20240125-155641.yaml b/.changes/1.8.0/Fixes-20240125-155641.yaml deleted file mode 100644 index c5e3b45d06f..00000000000 --- a/.changes/1.8.0/Fixes-20240125-155641.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fix seed and source selection in `dbt docs generate` -time: 2024-01-25T15:56:41.557934-05:00 -custom: - Author: michelleark - Issue: "9161" diff --git a/.changes/1.8.0/Fixes-20240125-182243.yaml b/.changes/1.8.0/Fixes-20240125-182243.yaml deleted file mode 100644 index e6300cad13c..00000000000 --- a/.changes/1.8.0/Fixes-20240125-182243.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Fixes -body: Add TestGenerateCatalogWithExternalNodes, include empty nodes in node selection - during docs generate -time: 2024-01-25T18:22:43.253228-05:00 -custom: - Author: michelleark - Issue: "9456" diff --git a/.changes/1.8.0/Fixes-20240126-134234.yaml b/.changes/1.8.0/Fixes-20240126-134234.yaml deleted file mode 100644 index 1d38d339490..00000000000 --- a/.changes/1.8.0/Fixes-20240126-134234.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fix node type plurals in FoundStats log message -time: 2024-01-26T13:42:34.651033+01:00 -custom: - Author: jtcohen6 - Issue: "9464" diff --git a/.changes/1.8.0/Fixes-20240130-124135.yaml b/.changes/1.8.0/Fixes-20240130-124135.yaml deleted file 
mode 100644 index 2ea4ff240c3..00000000000 --- a/.changes/1.8.0/Fixes-20240130-124135.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Fixes -body: Run manifest upgrade preprocessing on any older manifest version, including - v11 -time: 2024-01-30T12:41:35.899412+01:00 -custom: - Author: jtcohen6 - Issue: "9487" diff --git a/.changes/1.8.0/Fixes-20240201-124701.yaml b/.changes/1.8.0/Fixes-20240201-124701.yaml deleted file mode 100644 index 0c0b7d837e8..00000000000 --- a/.changes/1.8.0/Fixes-20240201-124701.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Fixes -body: Update 'compiled_code' context member logic to route based on command ('clone' - or not). Reimplement 'sql' context member as wrapper of 'compiled_code'. -time: 2024-02-01T12:47:01.488085+01:00 -custom: - Author: jtcohen6 - Issue: "9502" diff --git a/.changes/1.8.0/Fixes-20240201-164407.yaml b/.changes/1.8.0/Fixes-20240201-164407.yaml deleted file mode 100644 index a156a2a7dd5..00000000000 --- a/.changes/1.8.0/Fixes-20240201-164407.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fix bug where Semantic Layer filter strings are parsed into lists. -time: 2024-02-01T16:44:07.697777-08:00 -custom: - Author: courtneyholcomb - Issue: "9507" diff --git a/.changes/1.8.0/Fixes-20240206-152435.yaml b/.changes/1.8.0/Fixes-20240206-152435.yaml deleted file mode 100644 index d5cecf873fd..00000000000 --- a/.changes/1.8.0/Fixes-20240206-152435.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fix conflict with newer versions of Snowplow tracker -time: 2024-02-06T15:24:35.778891-06:00 -custom: - Author: edgarrmondragon akurdyukov - Issue: "8719" diff --git a/.changes/1.8.0/Fixes-20240206-161331.yaml b/.changes/1.8.0/Fixes-20240206-161331.yaml deleted file mode 100644 index b6a96549c4d..00000000000 --- a/.changes/1.8.0/Fixes-20240206-161331.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Initialize invocation context before test fixtures are built. 
-time: 2024-02-06T16:13:31.04575-05:00 -custom: - Author: peterallenwebb - Issue: '#9489' diff --git a/.changes/1.8.0/Fixes-20240207-150223.yaml b/.changes/1.8.0/Fixes-20240207-150223.yaml deleted file mode 100644 index cf34a3e9a7e..00000000000 --- a/.changes/1.8.0/Fixes-20240207-150223.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: When patching versioned models, set constraints after config -time: 2024-02-07T15:02:23.697345-05:00 -custom: - Author: gshank - Issue: "9364" diff --git a/.changes/1.8.0/Fixes-20240209-170146.yaml b/.changes/1.8.0/Fixes-20240209-170146.yaml deleted file mode 100644 index 5148219beda..00000000000 --- a/.changes/1.8.0/Fixes-20240209-170146.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: only include unmodified semantic mdodels in state:modified selection -time: 2024-02-09T17:01:46.676097-05:00 -custom: - Author: michelleark - Issue: "9548" diff --git a/.changes/1.8.0/Fixes-20240212-144733.yaml b/.changes/1.8.0/Fixes-20240212-144733.yaml deleted file mode 100644 index cdf9df56b78..00000000000 --- a/.changes/1.8.0/Fixes-20240212-144733.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Set query headers when manifest is passed in to dbtRunner -time: 2024-02-12T14:47:33.092877-05:00 -custom: - Author: gshank - Issue: "9546" diff --git a/.changes/1.8.0/Fixes-20240212-154728.yaml b/.changes/1.8.0/Fixes-20240212-154728.yaml deleted file mode 100644 index 6e8c070764f..00000000000 --- a/.changes/1.8.0/Fixes-20240212-154728.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fix Semantic Model Compare node relations -time: 2024-02-12T15:47:28.752107-08:00 -custom: - Author: ChenyuLInx - Issue: "9548" diff --git a/.changes/1.8.0/Fixes-20240212-165619.yaml b/.changes/1.8.0/Fixes-20240212-165619.yaml deleted file mode 100644 index 65acaeef40a..00000000000 --- a/.changes/1.8.0/Fixes-20240212-165619.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Store node_info in node associated logging events -time: 
2024-02-12T16:56:19.954358-05:00 -custom: - Author: gshank - Issue: "9557" diff --git a/.changes/1.8.0/Fixes-20240216-145632.yaml b/.changes/1.8.0/Fixes-20240216-145632.yaml deleted file mode 100644 index a02027f66a5..00000000000 --- a/.changes/1.8.0/Fixes-20240216-145632.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Tighten exception handling to avoid worker thread hangs. -time: 2024-02-16T14:56:32.858967-05:00 -custom: - Author: peterallenwebb - Issue: "9583" diff --git a/.changes/1.8.0/Fixes-20240220-165453.yaml b/.changes/1.8.0/Fixes-20240220-165453.yaml deleted file mode 100644 index 11dad8f558c..00000000000 --- a/.changes/1.8.0/Fixes-20240220-165453.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Clearer no-op logging in stubbed SavedQueryRunner -time: 2024-02-20T16:54:53.623096-05:00 -custom: - Author: jtcohen6 - Issue: "9533" diff --git a/.changes/1.8.0/Fixes-20240222-100958.yaml b/.changes/1.8.0/Fixes-20240222-100958.yaml deleted file mode 100644 index 1fb2ff46c6f..00000000000 --- a/.changes/1.8.0/Fixes-20240222-100958.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fix node_info contextvar handling so incorrect node_info doesn't persist -time: 2024-02-22T10:09:58.122809-05:00 -custom: - Author: gshank - Issue: "8866" diff --git a/.changes/1.8.0/Fixes-20240223-162107.yaml b/.changes/1.8.0/Fixes-20240223-162107.yaml deleted file mode 100644 index 446cf6d077a..00000000000 --- a/.changes/1.8.0/Fixes-20240223-162107.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Add target-path to retry -time: 2024-02-23T16:21:07.83639Z -custom: - Author: aranke - Issue: "8948" diff --git a/.changes/1.8.0/Fixes-20240226-173227.yaml b/.changes/1.8.0/Fixes-20240226-173227.yaml deleted file mode 100644 index fa1bf0ab8cf..00000000000 --- a/.changes/1.8.0/Fixes-20240226-173227.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Do not add duplicate input_measures -time: 2024-02-26T17:32:27.837427-05:00 -custom: - Author: gshank - Issue: "9360" diff 
--git a/.changes/1.8.0/Fixes-20240228-135928.yaml b/.changes/1.8.0/Fixes-20240228-135928.yaml deleted file mode 100644 index 3b908608ef6..00000000000 --- a/.changes/1.8.0/Fixes-20240228-135928.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Fixes -body: Throw a ParsingError if a primary key constraint is defined on multiple columns - or at both the column and model level. -time: 2024-02-28T13:59:28.728561-06:00 -custom: - Author: emmyoop - Issue: "9581" diff --git a/.changes/1.8.0/Fixes-20240229-114207.yaml b/.changes/1.8.0/Fixes-20240229-114207.yaml deleted file mode 100644 index aebb857f523..00000000000 --- a/.changes/1.8.0/Fixes-20240229-114207.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: 'Bug fix: don''t parse Jinja in filters for input metrics or measures.' -time: 2024-02-29T11:42:07.259143-08:00 -custom: - Author: courtneyholcomb - Issue: "9582" diff --git a/.changes/1.8.0/Fixes-20240301-000355.yaml b/.changes/1.8.0/Fixes-20240301-000355.yaml deleted file mode 100644 index 7172982750f..00000000000 --- a/.changes/1.8.0/Fixes-20240301-000355.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Fixes -body: Fix traceback parsing for exceptions raised due to csv fixtures moved into or - out of fixture/subfolders. 
-time: 2024-03-01T00:03:55.753473609+01:00 -custom: - Author: slothkong - Issue: "9570" diff --git a/.changes/1.8.0/Fixes-20240301-135536.yaml b/.changes/1.8.0/Fixes-20240301-135536.yaml deleted file mode 100644 index 2a96bd7eeec..00000000000 --- a/.changes/1.8.0/Fixes-20240301-135536.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fix partial parsing `KeyError` on deleted schema files -time: 2024-03-01T13:55:36.533176-08:00 -custom: - Author: QMalcolm - Issue: "8860" diff --git a/.changes/1.8.0/Fixes-20240307-142459.yaml b/.changes/1.8.0/Fixes-20240307-142459.yaml deleted file mode 100644 index 14c08da2816..00000000000 --- a/.changes/1.8.0/Fixes-20240307-142459.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Support saved queries in `dbt list` -time: 2024-03-07T14:24:59.530072-05:00 -custom: - Author: QMalcolm jtcohen6 - Issue: "9532" diff --git a/.changes/1.8.0/Fixes-20240312-165357.yaml b/.changes/1.8.0/Fixes-20240312-165357.yaml deleted file mode 100644 index 7a391118015..00000000000 --- a/.changes/1.8.0/Fixes-20240312-165357.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Fixes -body: include sources in catalog.json when over 100 relations selected for catalog - generation -time: 2024-03-12T16:53:57.714118-04:00 -custom: - Author: michelleark - Issue: "9755" diff --git a/.changes/1.8.0/Fixes-20240315-145538.yaml b/.changes/1.8.0/Fixes-20240315-145538.yaml deleted file mode 100644 index 8723734db86..00000000000 --- a/.changes/1.8.0/Fixes-20240315-145538.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Support overriding macros in packages in unit testing -time: 2024-03-15T14:55:38.958553-04:00 -custom: - Author: michelleark - Issue: "9624" diff --git a/.changes/1.8.0/Fixes-20240316-231152.yaml b/.changes/1.8.0/Fixes-20240316-231152.yaml deleted file mode 100644 index 725d8bbc3c5..00000000000 --- a/.changes/1.8.0/Fixes-20240316-231152.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Handle exceptions for failing on-run-* hooks in source 
freshness -time: 2024-03-16T23:11:52.819014-07:00 -custom: - Author: aranke - Issue: "9511" diff --git a/.changes/1.8.0/Fixes-20240317-005611.yaml b/.changes/1.8.0/Fixes-20240317-005611.yaml deleted file mode 100644 index 0878779174a..00000000000 --- a/.changes/1.8.0/Fixes-20240317-005611.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: 'Validation of unit test parsing for incremental models' -time: 2024-03-17T00:56:11.855232-07:00 -custom: - Author: aranke - Issue: "9593" diff --git a/.changes/1.8.0/Fixes-20240318-153338.yaml b/.changes/1.8.0/Fixes-20240318-153338.yaml deleted file mode 100644 index c1328ce8957..00000000000 --- a/.changes/1.8.0/Fixes-20240318-153338.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fix use of retry command on command using defer -time: 2024-03-18T15:33:38.90058-04:00 -custom: - Author: gshank - Issue: "9770" diff --git a/.changes/1.8.0/Fixes-20240323-122018.yaml b/.changes/1.8.0/Fixes-20240323-122018.yaml deleted file mode 100644 index a165511283c..00000000000 --- a/.changes/1.8.0/Fixes-20240323-122018.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Make `args` variable to be un-modified by `dbt.invoke(args)` -time: 2024-03-23T12:20:18.170948-06:00 -custom: - Author: dbeatty10 - Issue: 8938 9787 diff --git a/.changes/1.8.0/Fixes-20240323-124558.yaml b/.changes/1.8.0/Fixes-20240323-124558.yaml deleted file mode 100644 index b36173325ba..00000000000 --- a/.changes/1.8.0/Fixes-20240323-124558.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Only create the packages-install-path / dbt_packages folder during dbt deps -time: 2024-03-23T12:45:58.159017-06:00 -custom: - Author: dbeatty10 - Issue: 6985 9584 diff --git a/.changes/1.8.0/Fixes-20240326-003411.yaml b/.changes/1.8.0/Fixes-20240326-003411.yaml deleted file mode 100644 index f5b5fe9e095..00000000000 --- a/.changes/1.8.0/Fixes-20240326-003411.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Unit test path outputs -time: 2024-03-26T00:34:11.162594Z 
-custom: - Author: aranke - Issue: "9608" diff --git a/.changes/1.8.0/Fixes-20240326-162100.yaml b/.changes/1.8.0/Fixes-20240326-162100.yaml deleted file mode 100644 index f4c181dbb31..00000000000 --- a/.changes/1.8.0/Fixes-20240326-162100.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Fixes -body: Fix assorted source freshness edgecases so check is run or actionable information - is given -time: 2024-03-26T16:21:00.008936-07:00 -custom: - Author: QMalcolm - Issue: "9078" diff --git a/.changes/1.8.0/Fixes-20240327-150013.yaml b/.changes/1.8.0/Fixes-20240327-150013.yaml deleted file mode 100644 index f988dd5c1ab..00000000000 --- a/.changes/1.8.0/Fixes-20240327-150013.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Fixes -body: '"Fix Docker release process to account for both historical and current versions - of `dbt-postgres`' -time: 2024-03-27T15:00:13.388268-04:00 -custom: - Author: mikealfare - Issue: "9827" diff --git a/.changes/1.8.0/Fixes-20240402-135556.yaml b/.changes/1.8.0/Fixes-20240402-135556.yaml deleted file mode 100644 index b6ba62fc0f7..00000000000 --- a/.changes/1.8.0/Fixes-20240402-135556.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Exclude password-like fields for considering reparse -time: 2024-04-02T13:55:56.169953-07:00 -custom: - Author: ChenyuLInx - Issue: "9795" diff --git a/.changes/1.8.0/Fixes-20240408-130646.yaml b/.changes/1.8.0/Fixes-20240408-130646.yaml deleted file mode 100644 index 9aeaa94a27c..00000000000 --- a/.changes/1.8.0/Fixes-20240408-130646.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fixed query comments test -time: 2024-04-08T13:06:46.648144+02:00 -custom: - Author: damian3031 - Issue: "9860" diff --git a/.changes/1.8.0/Fixes-20240409-233347.yaml b/.changes/1.8.0/Fixes-20240409-233347.yaml deleted file mode 100644 index db929c16af0..00000000000 --- a/.changes/1.8.0/Fixes-20240409-233347.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Begin warning people about spaces in model names -time: 
2024-04-09T23:33:47.850166-07:00 -custom: - Author: QMalcolm - Issue: "9397" diff --git a/.changes/1.8.0/Fixes-20240412-095718.yaml b/.changes/1.8.0/Fixes-20240412-095718.yaml deleted file mode 100644 index 98fb9333f8d..00000000000 --- a/.changes/1.8.0/Fixes-20240412-095718.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Disambiguiate FreshnessConfigProblem error message -time: 2024-04-12T09:57:18.417882-07:00 -custom: - Author: michelleark - Issue: "9891" diff --git a/.changes/1.8.0/Security-20240222-152445.yaml b/.changes/1.8.0/Security-20240222-152445.yaml deleted file mode 100644 index e21e013310e..00000000000 --- a/.changes/1.8.0/Security-20240222-152445.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Security -body: Update Jinja2 to >= 3.1.3 to address CVE-2024-22195 -time: 2024-02-22T15:24:45.158305-08:00 -custom: - Author: QMalcolm - Issue: 9638 diff --git a/.changes/1.8.0/Security-20240417-141316.yaml b/.changes/1.8.0/Security-20240417-141316.yaml deleted file mode 100644 index 6611cafb443..00000000000 --- a/.changes/1.8.0/Security-20240417-141316.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Security -body: Bump sqlparse to >=0.5.0, <0.6.0 to address GHSA-2m57-hf25-phgg -time: 2024-04-17T14:13:16.896353-05:00 -custom: - Author: emmoop - Issue: "9951" diff --git a/.changes/1.8.0/Under the Hood-20230831-164435.yaml b/.changes/1.8.0/Under the Hood-20230831-164435.yaml deleted file mode 100644 index efa8a42cece..00000000000 --- a/.changes/1.8.0/Under the Hood-20230831-164435.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Added more type annotations. 
-time: 2023-08-31T16:44:35.737954-04:00 -custom: - Author: peterallenwebb - Issue: "8537" diff --git a/.changes/1.8.0/Under the Hood-20230912-190506.yaml b/.changes/1.8.0/Under the Hood-20230912-190506.yaml deleted file mode 100644 index 41804b6c0c7..00000000000 --- a/.changes/1.8.0/Under the Hood-20230912-190506.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Add unit testing functional tests -time: 2023-09-12T19:05:06.023126-04:00 -custom: - Author: gshank - Issue: "8512" diff --git a/.changes/1.8.0/Under the Hood-20231026-184953.yaml b/.changes/1.8.0/Under the Hood-20231026-184953.yaml deleted file mode 100644 index cdfbf25be5f..00000000000 --- a/.changes/1.8.0/Under the Hood-20231026-184953.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Remove usage of dbt.include.global_project in dbt/adapters -time: 2023-10-26T18:49:53.36449-04:00 -custom: - Author: michelleark - Issue: "8925" diff --git a/.changes/1.8.0/Under the Hood-20231027-140048.yaml b/.changes/1.8.0/Under the Hood-20231027-140048.yaml deleted file mode 100644 index 1baa6adf97f..00000000000 --- a/.changes/1.8.0/Under the Hood-20231027-140048.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Add a no-op runner for Saved Qeury -time: 2023-10-27T14:00:48.4755-07:00 -custom: - Author: ChenyuLInx - Issue: "8893" diff --git a/.changes/1.8.0/Under the Hood-20231101-102758.yaml b/.changes/1.8.0/Under the Hood-20231101-102758.yaml deleted file mode 100644 index 790cd9ec2f0..00000000000 --- a/.changes/1.8.0/Under the Hood-20231101-102758.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: remove dbt.flags.MP_CONTEXT usage in dbt/adapters -time: 2023-11-01T10:27:58.790153-04:00 -custom: - Author: michelleark - Issue: "8967" diff --git a/.changes/1.8.0/Under the Hood-20231101-173124.yaml b/.changes/1.8.0/Under the Hood-20231101-173124.yaml deleted file mode 100644 index 5a4656645f4..00000000000 --- a/.changes/1.8.0/Under the Hood-20231101-173124.yaml +++ 
/dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: 'Remove usage of dbt.flags.LOG_CACHE_EVENTS in dbt/adapters' -time: 2023-11-01T17:31:24.974093-04:00 -custom: - Author: michelleark - Issue: "8969" diff --git a/.changes/1.8.0/Under the Hood-20231103-195222.yaml b/.changes/1.8.0/Under the Hood-20231103-195222.yaml deleted file mode 100644 index 6feeaebb592..00000000000 --- a/.changes/1.8.0/Under the Hood-20231103-195222.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Under the Hood -body: Move CatalogRelationTypes test case to the shared test suite to be reused by - adapter maintainers -time: 2023-11-03T19:52:22.694394-04:00 -custom: - Author: mikealfare - Issue: "8952" diff --git a/.changes/1.8.0/Under the Hood-20231106-080422.yaml b/.changes/1.8.0/Under the Hood-20231106-080422.yaml deleted file mode 100644 index 44eff6517b3..00000000000 --- a/.changes/1.8.0/Under the Hood-20231106-080422.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Treat SystemExit as an interrupt if raised during node execution. 
-time: 2023-11-06T08:04:22.022179-05:00 -custom: - Author: benmosher - Issue: n/a diff --git a/.changes/1.8.0/Under the Hood-20231106-105730.yaml b/.changes/1.8.0/Under the Hood-20231106-105730.yaml deleted file mode 100644 index 6678ae0634d..00000000000 --- a/.changes/1.8.0/Under the Hood-20231106-105730.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Removing unused 'documentable' -time: 2023-11-06T10:57:30.694056-08:00 -custom: - Author: QMalcolm - Issue: "8871" diff --git a/.changes/1.8.0/Under the Hood-20231107-135728.yaml b/.changes/1.8.0/Under the Hood-20231107-135728.yaml deleted file mode 100644 index 025c871519a..00000000000 --- a/.changes/1.8.0/Under the Hood-20231107-135728.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Remove use of dbt/core exceptions in dbt/adapter -time: 2023-11-07T13:57:28.683727-08:00 -custom: - Author: colin-rogers-dbt MichelleArk - Issue: "8920" diff --git a/.changes/1.8.0/Under the Hood-20231107-191546.yaml b/.changes/1.8.0/Under the Hood-20231107-191546.yaml deleted file mode 100644 index d81c0448c63..00000000000 --- a/.changes/1.8.0/Under the Hood-20231107-191546.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Cache dbt plugin modules to improve integration test performance -time: 2023-11-07T19:15:46.170151-05:00 -custom: - Author: peterallenwebb - Issue: "9029" diff --git a/.changes/1.8.0/Under the Hood-20231108-163613.yaml b/.changes/1.8.0/Under the Hood-20231108-163613.yaml deleted file mode 100644 index 091c09bfe32..00000000000 --- a/.changes/1.8.0/Under the Hood-20231108-163613.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Consolidate deferral methods & flags -time: 2023-11-08T16:36:13.234324-05:00 -custom: - Author: jtcohen6 - Issue: 7965 8715 diff --git a/.changes/1.8.0/Under the Hood-20231111-175350.yaml b/.changes/1.8.0/Under the Hood-20231111-175350.yaml deleted file mode 100644 index c58708f7582..00000000000 --- a/.changes/1.8.0/Under the 
Hood-20231111-175350.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Under the Hood -body: Fix test_current_timestamp_matches_utc test; allow for MacOS runner system clock - variance -time: 2023-11-11T17:53:50.098843-05:00 -custom: - Author: mikealfare - Issue: "9057" diff --git a/.changes/1.8.0/Under the Hood-20231116-174251.yaml b/.changes/1.8.0/Under the Hood-20231116-174251.yaml deleted file mode 100644 index 11f02a2661e..00000000000 --- a/.changes/1.8.0/Under the Hood-20231116-174251.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Under the Hood -body: Remove usage of dbt.deprecations in dbt/adapters, enable core & adapter-specific - event types and protos -time: 2023-11-16T17:42:51.005023-05:00 -custom: - Author: michelleark - Issue: 8927 8918 diff --git a/.changes/1.8.0/Under the Hood-20231120-134735.yaml b/.changes/1.8.0/Under the Hood-20231120-134735.yaml deleted file mode 100644 index e312d9b1300..00000000000 --- a/.changes/1.8.0/Under the Hood-20231120-134735.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Clean up unused adaptor folders -time: 2023-11-20T13:47:35.923794-08:00 -custom: - Author: ChenyuLInx - Issue: "9123" diff --git a/.changes/1.8.0/Under the Hood-20231120-183214.yaml b/.changes/1.8.0/Under the Hood-20231120-183214.yaml deleted file mode 100644 index 570dd360d8c..00000000000 --- a/.changes/1.8.0/Under the Hood-20231120-183214.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Under the Hood -body: Move column constraints into common/contracts, removing another dependency of - adapters on core. -time: 2023-11-20T18:32:14.859503-05:00 -custom: - Author: peterallenwebb - Issue: "9024" diff --git a/.changes/1.8.0/Under the Hood-20231128-170732.yaml b/.changes/1.8.0/Under the Hood-20231128-170732.yaml deleted file mode 100644 index d1afc0086cd..00000000000 --- a/.changes/1.8.0/Under the Hood-20231128-170732.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Move dbt.semver to dbt.common.semver and update references. 
-time: 2023-11-28T17:07:32.172421-08:00 -custom: - Author: versusfacit - Issue: "9039" diff --git a/.changes/1.8.0/Under the Hood-20231130-135432.yaml b/.changes/1.8.0/Under the Hood-20231130-135432.yaml deleted file mode 100644 index bc683ea3307..00000000000 --- a/.changes/1.8.0/Under the Hood-20231130-135432.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Move lowercase utils method to common -time: 2023-11-30T13:54:32.561673-08:00 -custom: - Author: colin-rogers-dbt - Issue: "9180" diff --git a/.changes/1.8.0/Under the Hood-20231205-093544.yaml b/.changes/1.8.0/Under the Hood-20231205-093544.yaml deleted file mode 100644 index fdb150c300b..00000000000 --- a/.changes/1.8.0/Under the Hood-20231205-093544.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Remove usages of dbt.clients.jinja in dbt/adapters -time: 2023-12-05T09:35:44.845352+09:00 -custom: - Author: michelleark - Issue: "9205" diff --git a/.changes/1.8.0/Under the Hood-20231205-120559.yaml b/.changes/1.8.0/Under the Hood-20231205-120559.yaml deleted file mode 100644 index a209bda9f6c..00000000000 --- a/.changes/1.8.0/Under the Hood-20231205-120559.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Remove usage of dbt.contracts in dbt/adapters -time: 2023-12-05T12:05:59.936775+09:00 -custom: - Author: michelleark - Issue: "9208" diff --git a/.changes/1.8.0/Under the Hood-20231205-165812.yaml b/.changes/1.8.0/Under the Hood-20231205-165812.yaml deleted file mode 100644 index 8dcf402535c..00000000000 --- a/.changes/1.8.0/Under the Hood-20231205-165812.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Remove usage of dbt.contracts.graph.nodes.ResultNode in dbt/adapters -time: 2023-12-05T16:58:12.932172+09:00 -custom: - Author: michelleark - Issue: "9214" diff --git a/.changes/1.8.0/Under the Hood-20231205-170725.yaml b/.changes/1.8.0/Under the Hood-20231205-170725.yaml deleted file mode 100644 index 2018825bcff..00000000000 --- a/.changes/1.8.0/Under 
the Hood-20231205-170725.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Introduce RelationConfig Protocol, consolidate Relation.create_from -time: 2023-12-05T17:07:25.33861+09:00 -custom: - Author: michelleark - Issue: "9215" diff --git a/.changes/1.8.0/Under the Hood-20231205-185022.yaml b/.changes/1.8.0/Under the Hood-20231205-185022.yaml deleted file mode 100644 index 7d7b8cae1c1..00000000000 --- a/.changes/1.8.0/Under the Hood-20231205-185022.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Under the Hood -body: Move BaseConfig, Metadata and various other contract classes from model_config - to common/contracts/config -time: 2023-12-05T18:50:22.321229-08:00 -custom: - Author: colin-rorgers-dbt - Issue: "8919" diff --git a/.changes/1.8.0/Under the Hood-20231205-235830.yaml b/.changes/1.8.0/Under the Hood-20231205-235830.yaml deleted file mode 100644 index a533ffb4a4b..00000000000 --- a/.changes/1.8.0/Under the Hood-20231205-235830.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: remove manifest from adapter.set_relations_cache signature -time: 2023-12-05T23:58:30.920144+09:00 -custom: - Author: michelleark - Issue: "9217" diff --git a/.changes/1.8.0/Under the Hood-20231206-000343.yaml b/.changes/1.8.0/Under the Hood-20231206-000343.yaml deleted file mode 100644 index 564b61e89aa..00000000000 --- a/.changes/1.8.0/Under the Hood-20231206-000343.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: ' remove manifest from adapter catalog method signatures' -time: 2023-12-06T00:03:43.824252+09:00 -custom: - Author: michelleark - Issue: "9218" diff --git a/.changes/1.8.0/Under the Hood-20231207-111554.yaml b/.changes/1.8.0/Under the Hood-20231207-111554.yaml deleted file mode 100644 index 8dec8ed18e4..00000000000 --- a/.changes/1.8.0/Under the Hood-20231207-111554.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Add MacroResolverProtocol, remove lazy loading of manifest in adapter.execute_macro -time: 
2023-12-07T11:15:54.427818+09:00 -custom: - Author: michelleark - Issue: "9244" diff --git a/.changes/1.8.0/Under the Hood-20231207-224139.yaml b/.changes/1.8.0/Under the Hood-20231207-224139.yaml deleted file mode 100644 index 8c4f4fd3c1f..00000000000 --- a/.changes/1.8.0/Under the Hood-20231207-224139.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: pass query header context to MacroQueryStringSetter -time: 2023-12-07T22:41:39.498024+09:00 -custom: - Author: michelleark - Issue: 9249 9250 diff --git a/.changes/1.8.0/Under the Hood-20231208-004854.yaml b/.changes/1.8.0/Under the Hood-20231208-004854.yaml deleted file mode 100644 index e9d4a6fd9c5..00000000000 --- a/.changes/1.8.0/Under the Hood-20231208-004854.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: add macro_context_generator on adapter -time: 2023-12-08T00:48:54.506911+09:00 -custom: - Author: michelleark - Issue: "9247" diff --git a/.changes/1.8.0/Under the Hood-20231212-154842.yaml b/.changes/1.8.0/Under the Hood-20231212-154842.yaml deleted file mode 100644 index 8ae42fa6482..00000000000 --- a/.changes/1.8.0/Under the Hood-20231212-154842.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: pass mp_context to adapter factory as argument instead of import -time: 2023-12-12T15:48:42.866175-08:00 -custom: - Author: colin-rogers-dbt - Issue: "9025" diff --git a/.changes/1.8.0/Under the Hood-20231214-122134.yaml b/.changes/1.8.0/Under the Hood-20231214-122134.yaml deleted file mode 100644 index 97f98fb0592..00000000000 --- a/.changes/1.8.0/Under the Hood-20231214-122134.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: have dbt-postgres use RelationConfig protocol for materialized views' -time: 2023-12-14T12:21:34.756973-08:00 -custom: - Author: colin-rogers-dbt - Issue: "9292" diff --git a/.changes/1.8.0/Under the Hood-20231214-164107.yaml b/.changes/1.8.0/Under the Hood-20231214-164107.yaml deleted file mode 100644 index ded8a3b3c7c..00000000000 --- 
a/.changes/1.8.0/Under the Hood-20231214-164107.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: move system.py to common as dbt-bigquery relies on it to call gcloud -time: 2023-12-14T16:41:07.539814-08:00 -custom: - Author: colin-rogers-dbt - Issue: "9293" diff --git a/.changes/1.8.0/Under the Hood-20240103-145843.yaml b/.changes/1.8.0/Under the Hood-20240103-145843.yaml deleted file mode 100644 index dd7014230c5..00000000000 --- a/.changes/1.8.0/Under the Hood-20240103-145843.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Under the Hood -body: Reorganizing event definitions to define core events in dbt/events rather than - dbt/common -time: 2024-01-03T14:58:43.607499-05:00 -custom: - Author: michelleark - Issue: "9152" diff --git a/.changes/1.8.0/Under the Hood-20240104-133249.yaml b/.changes/1.8.0/Under the Hood-20240104-133249.yaml deleted file mode 100644 index b3fb06a90ba..00000000000 --- a/.changes/1.8.0/Under the Hood-20240104-133249.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: move exceptions used only in dbt/common to dbt/common/exceptions -time: 2024-01-04T13:32:49.076149-05:00 -custom: - Author: michelleark - Issue: "9332" diff --git a/.changes/1.8.0/Under the Hood-20240104-135849.yaml b/.changes/1.8.0/Under the Hood-20240104-135849.yaml deleted file mode 100644 index 080016db8fb..00000000000 --- a/.changes/1.8.0/Under the Hood-20240104-135849.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Remove usage of dbt.adapters.factory in dbt/common -time: 2024-01-04T13:58:49.221966-05:00 -custom: - Author: michelleark - Issue: "9334" diff --git a/.changes/1.8.0/Under the Hood-20240104-165248.yaml b/.changes/1.8.0/Under the Hood-20240104-165248.yaml deleted file mode 100644 index 867107a54a8..00000000000 --- a/.changes/1.8.0/Under the Hood-20240104-165248.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Under the Hood -body: Accept valid_error_names in WarnErrorOptions constructor, remove global usage - of event modules 
-time: 2024-01-04T16:52:48.173716-05:00 -custom: - Author: michelleark - Issue: "9337" diff --git a/.changes/1.8.0/Under the Hood-20240108-160140.yaml b/.changes/1.8.0/Under the Hood-20240108-160140.yaml deleted file mode 100644 index 2c4f9f8eaa8..00000000000 --- a/.changes/1.8.0/Under the Hood-20240108-160140.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Move result objects to dbt.artifacts -time: 2024-01-08T16:01:40.20348-05:00 -custom: - Author: gshank - Issue: "9193" diff --git a/.changes/1.8.0/Under the Hood-20240109-091856.yaml b/.changes/1.8.0/Under the Hood-20240109-091856.yaml deleted file mode 100644 index 0d2c8c7c46a..00000000000 --- a/.changes/1.8.0/Under the Hood-20240109-091856.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: dbt Labs OSS standardization of docs and templates. -time: 2024-01-09T09:18:56.686698+11:00 -custom: - Author: tonayya - Issue: "9252" diff --git a/.changes/1.8.0/Under the Hood-20240110-105734.yaml b/.changes/1.8.0/Under the Hood-20240110-105734.yaml deleted file mode 100644 index 5c8b26e550e..00000000000 --- a/.changes/1.8.0/Under the Hood-20240110-105734.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Add dbt-common as a dependency and remove dbt/common -time: 2024-01-10T10:57:34.054908-05:00 -custom: - Author: michelleark emmyoop - Issue: "9357" diff --git a/.changes/1.8.0/Under the Hood-20240110-161723.yaml b/.changes/1.8.0/Under the Hood-20240110-161723.yaml deleted file mode 100644 index 1ad7d8e791d..00000000000 --- a/.changes/1.8.0/Under the Hood-20240110-161723.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: move cache exceptions to dbt/adapters -time: 2024-01-10T16:17:23.845087-05:00 -custom: - Author: michelleark - Issue: "9362" diff --git a/.changes/1.8.0/Under the Hood-20240122-165446.yaml b/.changes/1.8.0/Under the Hood-20240122-165446.yaml deleted file mode 100644 index b0f95e661c9..00000000000 --- a/.changes/1.8.0/Under the Hood-20240122-165446.yaml 
+++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Clean up macro contexts. -time: 2024-01-22T16:54:46.247418-05:00 -custom: - Author: peterallenwebb - Issue: "9422" diff --git a/.changes/1.8.0/Under the Hood-20240123-114855.yaml b/.changes/1.8.0/Under the Hood-20240123-114855.yaml deleted file mode 100644 index 4e800a06882..00000000000 --- a/.changes/1.8.0/Under the Hood-20240123-114855.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Add the @requires.manifest decorator to the retry command. -time: 2024-01-23T11:48:55.627982-06:00 -custom: - Author: emmyoop - Issue: "9426" diff --git a/.changes/1.8.0/Under the Hood-20240123-142256.yaml b/.changes/1.8.0/Under the Hood-20240123-142256.yaml deleted file mode 100644 index fd3ea99ef62..00000000000 --- a/.changes/1.8.0/Under the Hood-20240123-142256.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Move WritableManifest + Documentation to dbt/artifacts -time: 2024-01-23T14:22:56.488252-05:00 -custom: - Author: michelleark - Issue: 9378 9379 diff --git a/.changes/1.8.0/Under the Hood-20240125-095453.yaml b/.changes/1.8.0/Under the Hood-20240125-095453.yaml deleted file mode 100644 index 2edb1eb487b..00000000000 --- a/.changes/1.8.0/Under the Hood-20240125-095453.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Define Macro and Group resources in dbt/artifacts -time: 2024-01-25T09:54:53.974332-05:00 -custom: - Author: michelleark - Issue: 9381 9382 diff --git a/.changes/1.8.0/Under the Hood-20240126-164038.yaml b/.changes/1.8.0/Under the Hood-20240126-164038.yaml deleted file mode 100644 index 6d542ada4f7..00000000000 --- a/.changes/1.8.0/Under the Hood-20240126-164038.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Move `SavedQuery` data definition to `dbt/artifacts` -time: 2024-01-26T16:40:38.790993-08:00 -custom: - Author: QMalcolm - Issue: "9386" diff --git a/.changes/1.8.0/Under the Hood-20240129-130549.yaml b/.changes/1.8.0/Under the 
Hood-20240129-130549.yaml deleted file mode 100644 index a961334bd94..00000000000 --- a/.changes/1.8.0/Under the Hood-20240129-130549.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Migrate data parts of `Metric` node to dbt/artifacts -time: 2024-01-29T13:05:49.04139-08:00 -custom: - Author: QMalcolm - Issue: "9383" diff --git a/.changes/1.8.0/Under the Hood-20240129-163800.yaml b/.changes/1.8.0/Under the Hood-20240129-163800.yaml deleted file mode 100644 index 0e724751aae..00000000000 --- a/.changes/1.8.0/Under the Hood-20240129-163800.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Move data portion of `SemanticModel` to dbt/artifacts -time: 2024-01-29T16:38:00.245253-08:00 -custom: - Author: QMalcolm - Issue: "9387" diff --git a/.changes/1.8.0/Under the Hood-20240130-161637.yaml b/.changes/1.8.0/Under the Hood-20240130-161637.yaml deleted file mode 100644 index 17d274a8595..00000000000 --- a/.changes/1.8.0/Under the Hood-20240130-161637.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Move data parts of `Exposure` class to dbt/artifacts -time: 2024-01-30T16:16:37.176038-08:00 -custom: - Author: QMalcolm - Issue: "9380" diff --git a/.changes/1.8.0/Under the Hood-20240201-125416.yaml b/.changes/1.8.0/Under the Hood-20240201-125416.yaml deleted file mode 100644 index ae1fab79ab7..00000000000 --- a/.changes/1.8.0/Under the Hood-20240201-125416.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Start using `Mergeable` from dbt-common -time: 2024-02-01T12:54:16.462414-08:00 -custom: - Author: QMalcolm - Issue: "9505" diff --git a/.changes/1.8.0/Under the Hood-20240207-122342.yaml b/.changes/1.8.0/Under the Hood-20240207-122342.yaml deleted file mode 100644 index f2e4a0ed3fe..00000000000 --- a/.changes/1.8.0/Under the Hood-20240207-122342.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Move manifest nodes to artifacts -time: 2024-02-07T12:23:42.909049-05:00 -custom: - Author: gshank - Issue: 
"9388" diff --git a/.changes/1.8.0/Under the Hood-20240208-120620.yaml b/.changes/1.8.0/Under the Hood-20240208-120620.yaml deleted file mode 100644 index 4b429467f5b..00000000000 --- a/.changes/1.8.0/Under the Hood-20240208-120620.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Move data parts of `SourceDefinition` class to dbt/artifacts -time: 2024-02-08T12:06:20.696709-08:00 -custom: - Author: QMalcolm - Issue: "9384" diff --git a/.changes/1.8.0/Under the Hood-20240216-104002.yaml b/.changes/1.8.0/Under the Hood-20240216-104002.yaml deleted file mode 100644 index b9877144a46..00000000000 --- a/.changes/1.8.0/Under the Hood-20240216-104002.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Remove uses of Replaceable class -time: 2024-02-16T10:40:02.25455-06:00 -custom: - Author: emmyoop - Issue: "7802" diff --git a/.changes/1.8.0/Under the Hood-20240221-104518.yaml b/.changes/1.8.0/Under the Hood-20240221-104518.yaml deleted file mode 100644 index 56c077fcd1c..00000000000 --- a/.changes/1.8.0/Under the Hood-20240221-104518.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Restrict protobuf to major version 4. 
-time: 2024-02-21T10:45:18.315195-05:00 -custom: - Author: peterallenwebb - Issue: "9566" diff --git a/.changes/1.8.0/Under the Hood-20240221-145058.yaml b/.changes/1.8.0/Under the Hood-20240221-145058.yaml deleted file mode 100644 index a847bb68c53..00000000000 --- a/.changes/1.8.0/Under the Hood-20240221-145058.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Make dbt-core compatible with Python 3.12 -time: 2024-02-21T14:50:58.983559Z -custom: - Author: l1xnan aranke - Issue: "9007" diff --git a/.changes/1.8.0/Under the Hood-20240222-115245.yaml b/.changes/1.8.0/Under the Hood-20240222-115245.yaml deleted file mode 100644 index a2d1bbcac10..00000000000 --- a/.changes/1.8.0/Under the Hood-20240222-115245.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Remove references to dbt.tracking and dbt.flags from dbt/artifacts -time: 2024-02-22T11:52:45.044853-06:00 -custom: - Author: emmyoop - Issue: "9390" diff --git a/.changes/1.8.0/Under the Hood-20240223-092330.yaml b/.changes/1.8.0/Under the Hood-20240223-092330.yaml deleted file mode 100644 index 71e5903b4ad..00000000000 --- a/.changes/1.8.0/Under the Hood-20240223-092330.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Remove unused key `wildcard` from MethodName enum -time: 2024-02-23T09:23:30.029245-05:00 -custom: - Author: asweet - Issue: "9641" diff --git a/.changes/1.8.0/Under the Hood-20240223-115021.yaml b/.changes/1.8.0/Under the Hood-20240223-115021.yaml deleted file mode 100644 index ccc1a381124..00000000000 --- a/.changes/1.8.0/Under the Hood-20240223-115021.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Implement primary key inference for model nodes -time: 2024-02-23T11:50:21.257494-08:00 -custom: - Author: aliceliu - Issue: "9652" diff --git a/.changes/1.8.0/Under the Hood-20240226-141038.yaml b/.changes/1.8.0/Under the Hood-20240226-141038.yaml deleted file mode 100644 index 6ea389b997e..00000000000 --- a/.changes/1.8.0/Under the 
Hood-20240226-141038.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Define UnitTestDefinition resource in dbt/artifacts/resources -time: 2024-02-26T14:10:38.807154-05:00 -custom: - Author: michelleark - Issue: "9667" diff --git a/.changes/1.8.0/Under the Hood-20240226-184258.yaml b/.changes/1.8.0/Under the Hood-20240226-184258.yaml deleted file mode 100644 index 06c0f5e029a..00000000000 --- a/.changes/1.8.0/Under the Hood-20240226-184258.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Use Manifest instead of WritableManifest in PreviousState and _get_deferred_manifest -time: 2024-02-26T18:42:58.740808-05:00 -custom: - Author: michelleark - Issue: "9567" diff --git a/.changes/1.8.0/Under the Hood-20240309-141054.yaml b/.changes/1.8.0/Under the Hood-20240309-141054.yaml deleted file mode 100644 index 4dff658a8c1..00000000000 --- a/.changes/1.8.0/Under the Hood-20240309-141054.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Improve dbt CLI speed -time: 2024-03-09T14:10:54.549618-05:00 -custom: - Author: dwreeves - Issue: "4627" diff --git a/.changes/1.8.0/Under the Hood-20240325-172059.yaml b/.changes/1.8.0/Under the Hood-20240325-172059.yaml deleted file mode 100644 index c53e1d390cb..00000000000 --- a/.changes/1.8.0/Under the Hood-20240325-172059.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Include node_info in various Result events -time: 2024-03-25T17:20:59.445718-04:00 -custom: - Author: gshank - Issue: "9619" diff --git a/.changes/1.8.0/Under the Hood-20240412-132000.yaml b/.changes/1.8.0/Under the Hood-20240412-132000.yaml deleted file mode 100644 index 794e9ca287b..00000000000 --- a/.changes/1.8.0/Under the Hood-20240412-132000.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Remove non dbt.artifacts dbt.* imports from dbt/artifacts -time: 2024-04-12T13:20:00.017737-07:00 -custom: - Author: michelleark - Issue: "9926" diff --git a/.changes/1.8.0/Under the 
Hood-20240412-134502.yaml b/.changes/1.8.0/Under the Hood-20240412-134502.yaml deleted file mode 100644 index 62d1ebb859b..00000000000 --- a/.changes/1.8.0/Under the Hood-20240412-134502.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Migrate to using `error_tag` provided by `dbt-common` -time: 2024-04-12T13:45:02.879023-07:00 -custom: - Author: QMalcolm - Issue: "9914" diff --git a/.changes/1.8.0/Under the Hood-20240416-150030.yaml b/.changes/1.8.0/Under the Hood-20240416-150030.yaml deleted file mode 100644 index b57a01a6cc6..00000000000 --- a/.changes/1.8.0/Under the Hood-20240416-150030.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Add a test for semantic manifest and move test fixtures needed for it -time: 2024-04-16T15:00:30.614286-07:00 -custom: - Author: ChenyuLInx - Issue: "9665" diff --git a/.changes/unreleased/Dependencies-20240509-093717.yaml b/.changes/unreleased/Dependencies-20240509-093717.yaml new file mode 100644 index 00000000000..82094a3e122 --- /dev/null +++ b/.changes/unreleased/Dependencies-20240509-093717.yaml @@ -0,0 +1,6 @@ +kind: Dependencies +body: Remove logbook dependency +time: 2024-05-09T09:37:17.745129-05:00 +custom: + Author: emmyoop + Issue: "8027" diff --git a/.changes/unreleased/Docs-20240311-140344.yaml b/.changes/unreleased/Docs-20240311-140344.yaml new file mode 100644 index 00000000000..6a78c95ef5a --- /dev/null +++ b/.changes/unreleased/Docs-20240311-140344.yaml @@ -0,0 +1,6 @@ +kind: Docs +body: Enable display of unit tests +time: 2024-03-11T14:03:44.490834-04:00 +custom: + Author: gshank + Issue: "501" diff --git a/.changes/unreleased/Docs-20240501-021050.yaml b/.changes/unreleased/Docs-20240501-021050.yaml new file mode 100644 index 00000000000..8799a69debf --- /dev/null +++ b/.changes/unreleased/Docs-20240501-021050.yaml @@ -0,0 +1,6 @@ +kind: Docs +body: Unit tests not rendering +time: 2024-05-01T02:10:50.987412+02:00 +custom: + Author: aranke + Issue: "506" diff --git 
a/.changes/unreleased/Docs-20240516-223036.yaml b/.changes/unreleased/Docs-20240516-223036.yaml new file mode 100644 index 00000000000..1eb76039621 --- /dev/null +++ b/.changes/unreleased/Docs-20240516-223036.yaml @@ -0,0 +1,6 @@ +kind: Docs +body: Add support for Saved Query node +time: 2024-05-16T22:30:36.206492-07:00 +custom: + Author: ChenyuLInx + Issue: "486" diff --git a/.changes/unreleased/Features-20240506-175642.yaml b/.changes/unreleased/Features-20240506-175642.yaml new file mode 100644 index 00000000000..308be95b4d6 --- /dev/null +++ b/.changes/unreleased/Features-20240506-175642.yaml @@ -0,0 +1,6 @@ +kind: Features +body: serialize inferred primary key +time: 2024-05-06T17:56:42.757673-05:00 +custom: + Author: dave-connors-3 + Issue: "9824" diff --git a/.changes/unreleased/Features-20240507-162717.yaml b/.changes/unreleased/Features-20240507-162717.yaml new file mode 100644 index 00000000000..662902483ff --- /dev/null +++ b/.changes/unreleased/Features-20240507-162717.yaml @@ -0,0 +1,6 @@ +kind: Features +body: 'Add unit_test: selection method' +time: 2024-05-07T16:27:17.047585-04:00 +custom: + Author: michelleark + Issue: "10053" diff --git a/.changes/unreleased/Features-20240522-000309.yaml b/.changes/unreleased/Features-20240522-000309.yaml new file mode 100644 index 00000000000..d02d3be3170 --- /dev/null +++ b/.changes/unreleased/Features-20240522-000309.yaml @@ -0,0 +1,7 @@ +kind: Features +body: Maximally parallelize dbt clone + in clone command +time: 2024-05-22T00:03:09.765977-04:00 +custom: + Author: michelleark + Issue: "7914" diff --git a/.changes/unreleased/Fixes-20230601-204157.yaml b/.changes/unreleased/Fixes-20230601-204157.yaml new file mode 100644 index 00000000000..fb6ca636e1e --- /dev/null +++ b/.changes/unreleased/Fixes-20230601-204157.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Remove unused check_new method +time: 2023-06-01T20:41:57.556342+02:00 +custom: + Author: kevinneville + Issue: "7586" diff --git 
a/.changes/unreleased/Fixes-20240410-181741.yaml b/.changes/unreleased/Fixes-20240410-181741.yaml deleted file mode 100644 index 66ec5e7d373..00000000000 --- a/.changes/unreleased/Fixes-20240410-181741.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Add NodeRelation to SavedQuery Export -time: 2024-04-10T18:17:41.42533+01:00 -custom: - Author: aranke - Issue: "9534" diff --git a/.changes/unreleased/Fixes-20240508-151127.yaml b/.changes/unreleased/Fixes-20240508-151127.yaml new file mode 100644 index 00000000000..4ccd18a6729 --- /dev/null +++ b/.changes/unreleased/Fixes-20240508-151127.yaml @@ -0,0 +1,7 @@ +kind: Fixes +body: 'Restore previous behavior for --favor-state: only favor defer_relation if not + selected in current command' +time: 2024-05-08T15:11:27.510912+02:00 +custom: + Author: jtcohen6 + Issue: "10107" diff --git a/.changes/unreleased/Fixes-20240509-091411.yaml b/.changes/unreleased/Fixes-20240509-091411.yaml new file mode 100644 index 00000000000..a4c243779c5 --- /dev/null +++ b/.changes/unreleased/Fixes-20240509-091411.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Unit test fixture (csv) returns null for empty value +time: 2024-05-09T09:14:11.772709-04:00 +custom: + Author: michelleark + Issue: "9881" diff --git a/.changes/unreleased/Fixes-20240516-153913.yaml b/.changes/unreleased/Fixes-20240516-153913.yaml new file mode 100644 index 00000000000..b96f45e2c37 --- /dev/null +++ b/.changes/unreleased/Fixes-20240516-153913.yaml @@ -0,0 +1,7 @@ +kind: Fixes +body: Fix json format log and --quiet for ls and jinja print by converting print call + to fire events +time: 2024-05-16T15:39:13.896723-07:00 +custom: + Author: ChenyuLInx + Issue: "8756" diff --git a/.changes/unreleased/Fixes-20240516-223510.yaml b/.changes/unreleased/Fixes-20240516-223510.yaml new file mode 100644 index 00000000000..53a0e553e7f --- /dev/null +++ b/.changes/unreleased/Fixes-20240516-223510.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Add resource type to saved_query +time: 
2024-05-16T22:35:10.287514-07:00 +custom: + Author: ChenyuLInx + Issue: "10168" diff --git a/.changes/unreleased/Fixes-20240522-182855.yaml b/.changes/unreleased/Fixes-20240522-182855.yaml new file mode 100644 index 00000000000..b0963b4a6cc --- /dev/null +++ b/.changes/unreleased/Fixes-20240522-182855.yaml @@ -0,0 +1,7 @@ +kind: Fixes +body: 'Fix: Order-insensitive unit test equality assertion for expected/actual with + multiple nulls' +time: 2024-05-22T18:28:55.91733-04:00 +custom: + Author: michelleark + Issue: "10167" diff --git a/.changes/unreleased/Security-20240522-094540.yaml b/.changes/unreleased/Security-20240522-094540.yaml new file mode 100644 index 00000000000..b35f96dc084 --- /dev/null +++ b/.changes/unreleased/Security-20240522-094540.yaml @@ -0,0 +1,6 @@ +kind: Security +body: Explicitly bind to localhost in docs serve +time: 2024-05-22T09:45:40.748185-04:00 +custom: + Author: ChenyuLInx michelleark + Issue: "10209" diff --git a/.changes/unreleased/Under the Hood-20240502-154430.yaml b/.changes/unreleased/Under the Hood-20240502-154430.yaml new file mode 100644 index 00000000000..6c17df3a6c6 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240502-154430.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Clear error message for Private package in dbt-core +time: 2024-05-02T15:44:30.713097-07:00 +custom: + Author: ChenyuLInx + Issue: "10083" diff --git a/.changes/unreleased/Under the Hood-20240506-145511.yaml b/.changes/unreleased/Under the Hood-20240506-145511.yaml new file mode 100644 index 00000000000..f5bad25d797 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240506-145511.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Enable use of context in serialization +time: 2024-05-06T14:55:11.1812-04:00 +custom: + Author: gshank + Issue: "10093" diff --git a/.changes/unreleased/Under the Hood-20240519-155946.yaml b/.changes/unreleased/Under the Hood-20240519-155946.yaml new file mode 100644 index 00000000000..920c7ff860d --- /dev/null 
+++ b/.changes/unreleased/Under the Hood-20240519-155946.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Make RSS high water mark measurement more accurate on Linux +time: 2024-05-19T15:59:46.700842315-04:00 +custom: + Author: peterallenwebb + Issue: "10177" diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index fc9001a8fa1..b596bf7293f 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -95,8 +95,12 @@ jobs: python -m pip install tox tox --version - - name: Run tox - run: tox + - name: Run unit tests + uses: nick-fields/retry@v3 + with: + timeout_minutes: 10 + max_attempts: 3 + command: tox -e unit - name: Get current date if: always() @@ -135,7 +139,7 @@ jobs: - name: generate include id: generate-include run: | - INCLUDE=('"python-version":"3.8","os":"windows-latest"' '"python-version":"3.8","os":"macos-latest"' ) + INCLUDE=('"python-version":"3.8","os":"windows-latest"' '"python-version":"3.8","os":"macos-12"' ) INCLUDE_GROUPS="[" for include in ${INCLUDE[@]}; do for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do @@ -201,8 +205,12 @@ jobs: python -m pip install tox tox --version - - name: Run tests - run: tox -- --ddtrace + - name: Run integration tests + uses: nick-fields/retry@v3 + with: + timeout_minutes: 30 + max_attempts: 3 + command: tox -- --ddtrace env: PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }} diff --git a/.github/workflows/schema-check.yml b/.github/workflows/schema-check.yml index 18d8f1b8830..cf573f38598 100644 --- a/.github/workflows/schema-check.yml +++ b/.github/workflows/schema-check.yml @@ -13,20 +13,18 @@ name: Artifact Schema Check on: + pull_request: + types: [ opened, reopened, labeled, unlabeled, synchronize ] + paths-ignore: [ '.changes/**', '.github/**', 'tests/**', '**.md', '**.yml' ] + workflow_dispatch: - pull_request: #TODO: remove before merging - push: - branches: - - "develop" - - "*.latest" - - 
"releases/*" # no special access is needed permissions: read-all env: LATEST_SCHEMA_PATH: ${{ github.workspace }}/new_schemas - SCHEMA_DIFF_ARTIFACT: ${{ github.workspace }}/schema_schanges.txt + SCHEMA_DIFF_ARTIFACT: ${{ github.workspace }}/schema_changes.txt DBT_REPO_DIRECTORY: ${{ github.workspace }}/dbt SCHEMA_REPO_DIRECTORY: ${{ github.workspace }}/schemas.getdbt.com @@ -46,15 +44,32 @@ jobs: with: path: ${{ env.DBT_REPO_DIRECTORY }} + - name: Check for changes in core/dbt/artifacts + # https://github.com/marketplace/actions/paths-changes-filter + uses: dorny/paths-filter@v3 + id: check_artifact_changes + with: + filters: | + artifacts_changed: + - 'core/dbt/artifacts/**' + list-files: shell + working-directory: ${{ env.DBT_REPO_DIRECTORY }} + + - name: Succeed if no artifacts have changed + if: steps.check_artifact_changes.outputs.artifacts_changed == 'false' + run: | + echo "No artifact changes found in core/dbt/artifacts. CI check passed." + - name: Checkout schemas.getdbt.com repo + if: steps.check_artifact_changes.outputs.artifacts_changed == 'true' uses: actions/checkout@v4 with: repository: dbt-labs/schemas.getdbt.com ref: 'main' - ssh-key: ${{ secrets.SCHEMA_SSH_PRIVATE_KEY }} path: ${{ env.SCHEMA_REPO_DIRECTORY }} - name: Generate current schema + if: steps.check_artifact_changes.outputs.artifacts_changed == 'true' run: | cd ${{ env.DBT_REPO_DIRECTORY }} python3 -m venv env @@ -65,26 +80,17 @@ jobs: # Copy generated schema files into the schemas.getdbt.com repo # Do a git diff to find any changes - # Ignore any date or version changes though + # Ignore any lines with date-like (yyyy-mm-dd) or version-like (x.y.z) changes - name: Compare schemas + if: steps.check_artifact_changes.outputs.artifacts_changed == 'true' run: | cp -r ${{ env.LATEST_SCHEMA_PATH }}/dbt ${{ env.SCHEMA_REPO_DIRECTORY }} cd ${{ env.SCHEMA_REPO_DIRECTORY }} - diff_results=$(git diff -I='*[0-9]{4}-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])T' \ - 
-I='*[0-9]{1}.[0-9]{2}.[0-9]{1}(rc[0-9]|b[0-9]| )' --compact-summary) - if [[ $(echo diff_results) ]]; then - echo $diff_results - echo "Schema changes detected!" - git diff -I='*[0-9]{4}-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])T' \ - -I='*[0-9]{1}.[0-9]{2}.[0-9]{1}(rc[0-9]|b[0-9]| )' > ${{ env.SCHEMA_DIFF_ARTIFACT }} - exit 1 - else - echo "No schema changes detected" - fi + git diff -I='*[0-9]{4}-[0-9]{2}-[0-9]{2}' -I='*[0-9]+\.[0-9]+\.[0-9]+' --exit-code > ${{ env.SCHEMA_DIFF_ARTIFACT }} - name: Upload schema diff uses: actions/upload-artifact@v4 - if: ${{ failure() }} + if: ${{ failure() && steps.check_artifact_changes.outputs.artifacts_changed == 'true' }} with: - name: 'schema_schanges.txt' + name: 'schema_changes.txt' path: '${{ env.SCHEMA_DIFF_ARTIFACT }}' diff --git a/.github/workflows/structured-logging-schema-check.yml b/.github/workflows/structured-logging-schema-check.yml index 3c4b2eb0406..4934bffcaeb 100644 --- a/.github/workflows/structured-logging-schema-check.yml +++ b/.github/workflows/structured-logging-schema-check.yml @@ -94,7 +94,11 @@ jobs: # integration tests generate a ton of logs in different files. the next step will find them all. # we actually care if these pass, because the normal test run doesn't usually include many json log outputs - name: Run integration tests - run: tox -e integration -- -nauto + uses: nick-fields/retry@v3 + with: + timeout_minutes: 30 + max_attempts: 3 + command: tox -e integration -- -nauto env: PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }} diff --git a/.github/workflows/test-repeater.yml b/.github/workflows/test-repeater.yml index dedaadd2097..315133336e8 100644 --- a/.github/workflows/test-repeater.yml +++ b/.github/workflows/test-repeater.yml @@ -36,7 +36,7 @@ on: type: choice options: - 'ubuntu-latest' - - 'macos-latest' + - 'macos-12' - 'windows-latest' num_runs_per_batch: description: 'Max number of times to run the test per batch. 
We always run 10 batches.' @@ -101,7 +101,7 @@ jobs: # mac and windows don't use make due to limitations with docker with those runners in GitHub - name: "Set up postgres (macos)" - if: inputs.os == 'macos-latest' + if: inputs.os == 'macos-12' uses: ./.github/actions/setup-postgres-macos - name: "Set up postgres (windows)" diff --git a/.isort.cfg b/.isort.cfg new file mode 100644 index 00000000000..f0695fa70f5 --- /dev/null +++ b/.isort.cfg @@ -0,0 +1,4 @@ +[settings] +profile=black +extend_skip_glob=.github/*,third-party-stubs/*,scripts/* +known_first_party=dbt,dbt_adapters,dbt_common,dbt_extractor,dbt_semantic_interface diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 160c9cfd0f7..2ac0c53a38c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -19,6 +19,10 @@ repos: exclude_types: - "markdown" - id: check-case-conflict +- repo: https://github.com/pycqa/isort + rev: 5.12.0 + hooks: + - id: isort - repo: https://github.com/psf/black rev: 22.3.0 hooks: diff --git a/CHANGELOG.md b/CHANGELOG.md index 8c4e9dcf37f..900460abb2e 100755 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,303 +5,12 @@ - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). 
For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry) -## dbt-core 1.8.0-b3 - April 18, 2024 - -### Features - -- Support scrubbing secret vars ([#7247](https://github.com/dbt-labs/dbt-core/issues/7247)) -- Add wildcard support to the group selector method ([#9811](https://github.com/dbt-labs/dbt-core/issues/9811)) -- source freshness precomputes metadata-based freshness in batch, if possible ([#8705](https://github.com/dbt-labs/dbt-core/issues/8705)) -- Better error message when trying to select a disabled model ([#9747](https://github.com/dbt-labs/dbt-core/issues/9747)) -- Support SQL in unit testing fixtures ([#9405](https://github.com/dbt-labs/dbt-core/issues/9405)) - -### Fixes - -- fix configuration of turning test warnings into failures with WARN_ERROR_OPTIONS ([#7761](https://github.com/dbt-labs/dbt-core/issues/7761)) -- Fix conflict with newer versions of Snowplow tracker ([#8719](https://github.com/dbt-labs/dbt-core/issues/8719)) -- Only create the packages-install-path / dbt_packages folder during dbt deps ([#6985](https://github.com/dbt-labs/dbt-core/issues/6985), [#9584](https://github.com/dbt-labs/dbt-core/issues/9584)) -- Exclude password-like fields for considering reparse ([#9795](https://github.com/dbt-labs/dbt-core/issues/9795)) -- Fixed query comments test ([#9860](https://github.com/dbt-labs/dbt-core/issues/9860)) -- Begin warning people about spaces in model names ([#9397](https://github.com/dbt-labs/dbt-core/issues/9397)) -- Disambiguiate FreshnessConfigProblem error message ([#9891](https://github.com/dbt-labs/dbt-core/issues/9891)) - -### Under the Hood - -- Remove non dbt.artifacts dbt.* imports from dbt/artifacts ([#9926](https://github.com/dbt-labs/dbt-core/issues/9926)) -- Migrate to using `error_tag` provided by `dbt-common` ([#9914](https://github.com/dbt-labs/dbt-core/issues/9914)) -- Add a test for semantic manifest and move test 
fixtures needed for it ([#9665](https://github.com/dbt-labs/dbt-core/issues/9665)) - -### Dependencies - -- Relax pathspec upper bound version restriction ([#9373](https://github.com/dbt-labs/dbt-core/issues/9373)) -- Bump python from 3.10.7-slim-nullseye to 3.11.2-slim-bullseye in /docker ([#9687](https://github.com/dbt-labs/dbt-core/issues/9687)) -- Remove duplicate dependency of protobuf in dev-requirements ([#9830](https://github.com/dbt-labs/dbt-core/issues/9830)) -- Bump black from 23.3.0 to >=24.3.0,<25.0 ([#8074](https://github.com/dbt-labs/dbt-core/issues/8074)) - -### Security - -- Bump sqlparse to >=0.5.0, <0.6.0 to address GHSA-2m57-hf25-phgg ([#9951](https://github.com/dbt-labs/dbt-core/issues/9951)) - -### Contributors -- [@SamuelBFavarin](https://github.com/SamuelBFavarin) ([#9747](https://github.com/dbt-labs/dbt-core/issues/9747)) -- [@akurdyukov](https://github.com/akurdyukov) ([#8719](https://github.com/dbt-labs/dbt-core/issues/8719)) -- [@damian3031](https://github.com/damian3031) ([#9860](https://github.com/dbt-labs/dbt-core/issues/9860)) -- [@edgarrmondragon](https://github.com/edgarrmondragon) ([#8719](https://github.com/dbt-labs/dbt-core/issues/8719)) -- [@emmoop](https://github.com/emmoop) ([#9951](https://github.com/dbt-labs/dbt-core/issues/9951)) -- [@heysweet](https://github.com/heysweet) ([#9811](https://github.com/dbt-labs/dbt-core/issues/9811)) -- [@jx2lee](https://github.com/jx2lee) ([#7761](https://github.com/dbt-labs/dbt-core/issues/7761)) -- [@nielspardon](https://github.com/nielspardon) ([#7247](https://github.com/dbt-labs/dbt-core/issues/7247)) -- [@niteshy](https://github.com/niteshy) ([#9830](https://github.com/dbt-labs/dbt-core/issues/9830)) -- [@rzjfr](https://github.com/rzjfr) ([#9373](https://github.com/dbt-labs/dbt-core/issues/9373)) - - -## dbt-core 1.8.0-b2 - April 03, 2024 - -### Features - -- Global config for --target and --profile CLI flags and DBT_TARGET and DBT_PROFILE environment variables. 
([#7798](https://github.com/dbt-labs/dbt-core/issues/7798)) -- Allow excluding resource types for build, list, and clone commands, and provide env vars ([#9237](https://github.com/dbt-labs/dbt-core/issues/9237)) -- SourceDefinition.meta represents source-level and table-level meta properties, instead of only table-level ([#9766](https://github.com/dbt-labs/dbt-core/issues/9766)) -- Allow metrics in semantic layer filters. ([#9804](https://github.com/dbt-labs/dbt-core/issues/9804)) - -### Fixes - -- fix lock-file bad indentation ([#9319](https://github.com/dbt-labs/dbt-core/issues/9319)) -- Tighten exception handling to avoid worker thread hangs. ([#9583](https://github.com/dbt-labs/dbt-core/issues/9583)) -- Do not add duplicate input_measures ([#9360](https://github.com/dbt-labs/dbt-core/issues/9360)) -- Throw a ParsingError if a primary key constraint is defined on multiple columns or at both the column and model level. ([#9581](https://github.com/dbt-labs/dbt-core/issues/9581)) -- Bug fix: don't parse Jinja in filters for input metrics or measures. ([#9582](https://github.com/dbt-labs/dbt-core/issues/9582)) -- Fix traceback parsing for exceptions raised due to csv fixtures moved into or out of fixture/subfolders. 
([#9570](https://github.com/dbt-labs/dbt-core/issues/9570)) -- Fix partial parsing `KeyError` on deleted schema files ([#8860](https://github.com/dbt-labs/dbt-core/issues/8860)) -- Support saved queries in `dbt list` ([#9532](https://github.com/dbt-labs/dbt-core/issues/9532)) -- include sources in catalog.json when over 100 relations selected for catalog generation ([#9755](https://github.com/dbt-labs/dbt-core/issues/9755)) -- Support overriding macros in packages in unit testing ([#9624](https://github.com/dbt-labs/dbt-core/issues/9624)) -- Handle exceptions for failing on-run-* hooks in source freshness ([#9511](https://github.com/dbt-labs/dbt-core/issues/9511)) -- Validation of unit test parsing for incremental models ([#9593](https://github.com/dbt-labs/dbt-core/issues/9593)) -- Fix use of retry command on command using defer ([#9770](https://github.com/dbt-labs/dbt-core/issues/9770)) -- Make `args` variable to be un-modified by `dbt.invoke(args)` ([#8938](https://github.com/dbt-labs/dbt-core/issues/8938), [#9787](https://github.com/dbt-labs/dbt-core/issues/9787)) -- Unit test path outputs ([#9608](https://github.com/dbt-labs/dbt-core/issues/9608)) -- Fix assorted source freshness edgecases so check is run or actionable information is given ([#9078](https://github.com/dbt-labs/dbt-core/issues/9078)) -- "Fix Docker release process to account for both historical and current versions of `dbt-postgres` ([#9827](https://github.com/dbt-labs/dbt-core/issues/9827)) - -### Docs - -- Add analytics for dbt.com ([dbt-docs/#430](https://github.com/dbt-labs/dbt-docs/issues/430)) - -### Under the Hood - -- Remove unused key `wildcard` from MethodName enum ([#9641](https://github.com/dbt-labs/dbt-core/issues/9641)) -- Improve dbt CLI speed ([#4627](https://github.com/dbt-labs/dbt-core/issues/4627)) -- Include node_info in various Result events ([#9619](https://github.com/dbt-labs/dbt-core/issues/9619)) - -### Dependencies - -- Bump actions/upload-artifact from 3 to 4 
([#9470](https://github.com/dbt-labs/dbt-core/pull/9470)) -- Restrict protobuf to 4.* versions ([#9566](https://github.com/dbt-labs/dbt-core/pull/9566)) -- Bump codecov/codecov-action from 3 to 4 ([#9659](https://github.com/dbt-labs/dbt-core/pull/9659)) - -### Contributors -- [@asweet](https://github.com/asweet) ([#9641](https://github.com/dbt-labs/dbt-core/issues/9641)) -- [@b-per](https://github.com/b-per) ([#430](https://github.com/dbt-labs/dbt-core/issues/430)) -- [@barton996](https://github.com/barton996) ([#7798](https://github.com/dbt-labs/dbt-core/issues/7798)) -- [@courtneyholcomb](https://github.com/courtneyholcomb) ([#9804](https://github.com/dbt-labs/dbt-core/issues/9804), [#9582](https://github.com/dbt-labs/dbt-core/issues/9582)) -- [@dwreeves](https://github.com/dwreeves) ([#4627](https://github.com/dbt-labs/dbt-core/issues/4627)) -- [@jx2lee](https://github.com/jx2lee) ([#9319](https://github.com/dbt-labs/dbt-core/issues/9319)) -- [@slothkong](https://github.com/slothkong) ([#9570](https://github.com/dbt-labs/dbt-core/issues/9570)) - -## dbt-core 1.8.0-b1 - February 28, 2024 - -### Breaking Changes - -- Remove adapter.get_compiler interface ([#9148](https://github.com/dbt-labs/dbt-core/issues/9148)) -- Move AdapterLogger to adapters folder ([#9151](https://github.com/dbt-labs/dbt-core/issues/9151)) -- Rm --dry-run flag from 'dbt deps --add-package', in favor of just 'dbt deps --lock' ([#9100](https://github.com/dbt-labs/dbt-core/issues/9100)) -- move event manager setup back to core, remove ref to global EVENT_MANAGER and clean up event manager functions ([#9150](https://github.com/dbt-labs/dbt-core/issues/9150)) -- Remove dbt-tests-adapter and dbt-postgres packages from dbt-core ([#9455](https://github.com/dbt-labs/dbt-core/issues/9455)) - -### Features - -- Initial implementation of unit testing ([#8287](https://github.com/dbt-labs/dbt-core/issues/8287)) -- Unit test manifest artifacts and selection 
([#8295](https://github.com/dbt-labs/dbt-core/issues/8295)) -- Support config with tags & meta for unit tests ([#8294](https://github.com/dbt-labs/dbt-core/issues/8294)) -- Allow adapters to include package logs in dbt standard logging ([#7859](https://github.com/dbt-labs/dbt-core/issues/7859)) -- Enable inline csv fixtures in unit tests ([#8626](https://github.com/dbt-labs/dbt-core/issues/8626)) -- Add drop_schema_named macro ([#8025](https://github.com/dbt-labs/dbt-core/issues/8025)) -- migrate utils to common and adapters folders ([#8924](https://github.com/dbt-labs/dbt-core/issues/8924)) -- Move Agate helper client into common ([#8926](https://github.com/dbt-labs/dbt-core/issues/8926)) -- remove usage of dbt.config.PartialProject from dbt/adapters ([#8928](https://github.com/dbt-labs/dbt-core/issues/8928)) -- Add exports to SavedQuery spec ([#8892](https://github.com/dbt-labs/dbt-core/issues/8892)) -- Support unit testing incremental models ([#8422](https://github.com/dbt-labs/dbt-core/issues/8422)) -- Add support of csv file fixtures to unit testing ([#8290](https://github.com/dbt-labs/dbt-core/issues/8290)) -- Remove legacy logger ([#8027](https://github.com/dbt-labs/dbt-core/issues/8027)) -- Unit tests support --defer and state:modified ([#8517](https://github.com/dbt-labs/dbt-core/issues/8517)) -- Support setting export configs hierarchically via saved query and project configs ([#8956](https://github.com/dbt-labs/dbt-core/issues/8956)) -- Support source inputs in unit tests ([#8507](https://github.com/dbt-labs/dbt-core/issues/8507)) -- Use daff to render diff displayed in stdout when unit test fails ([#8558](https://github.com/dbt-labs/dbt-core/issues/8558)) -- Move unit testing to test command ([#8979](https://github.com/dbt-labs/dbt-core/issues/8979)) -- Support --empty flag for schema-only dry runs ([#8971](https://github.com/dbt-labs/dbt-core/issues/8971)) -- Support unit tests in non-root packages 
([#8285](https://github.com/dbt-labs/dbt-core/issues/8285)) -- Convert the `tests` config to `data_tests` in both dbt_project.yml and schema files. in schema files. ([#8699](https://github.com/dbt-labs/dbt-core/issues/8699)) -- Make fixture files full-fledged parts of the manifest and enable partial parsing ([#9067](https://github.com/dbt-labs/dbt-core/issues/9067)) -- Adds support for parsing conversion metric related properties for the semantic layer. ([#9203](https://github.com/dbt-labs/dbt-core/issues/9203)) -- In build command run unit tests before models ([#9128](https://github.com/dbt-labs/dbt-core/issues/9128)) -- Move flags from UserConfig in profiles.yml to flags in dbt_project.yml ([#9183](https://github.com/dbt-labs/dbt-core/issues/9183)) -- Added hook support for `dbt source freshness` ([#5609](https://github.com/dbt-labs/dbt-core/issues/5609)) -- Align with order of unit test output when `actual` differs from `expected` ([#9370](https://github.com/dbt-labs/dbt-core/issues/9370)) -- Added support for external nodes in unit test nodes ([#8944](https://github.com/dbt-labs/dbt-core/issues/8944)) -- Enable unit testing versioned models ([#9344](https://github.com/dbt-labs/dbt-core/issues/9344)) -- Enable list command for unit tests ([#8508](https://github.com/dbt-labs/dbt-core/issues/8508)) -- Integration Test Optimizations ([#9498](https://github.com/dbt-labs/dbt-core/issues/9498)) -- Accelerate integration tests with caching. 
([#9498](https://github.com/dbt-labs/dbt-core/issues/9498)) -- Cache environment variables ([#9489](https://github.com/dbt-labs/dbt-core/issues/9489)) -- Support meta at the config level for Metric nodes ([#9441](https://github.com/dbt-labs/dbt-core/issues/9441)) -- Add cache to SavedQuery config ([#9540](https://github.com/dbt-labs/dbt-core/issues/9540)) - -### Fixes - -- For packages installed with tarball method, fetch metadata to resolve nested dependencies ([#8621](https://github.com/dbt-labs/dbt-core/issues/8621)) -- Fix partial parsing not working for semantic model change ([#8859](https://github.com/dbt-labs/dbt-core/issues/8859)) -- Handle unknown `type_code` for model contracts ([#8877](https://github.com/dbt-labs/dbt-core/issues/8877), [#8353](https://github.com/dbt-labs/dbt-core/issues/8353)) -- Rework get_catalog implementation to retain previous adapter interface semantics ([#8846](https://github.com/dbt-labs/dbt-core/issues/8846)) -- Add back contract enforcement for temporary tables on postgres ([#8857](https://github.com/dbt-labs/dbt-core/issues/8857)) -- Add version to fqn when version==0 ([#8836](https://github.com/dbt-labs/dbt-core/issues/8836)) -- Fix cased comparison in catalog-retrieval function. ([#8939](https://github.com/dbt-labs/dbt-core/issues/8939)) -- Catalog queries now assign the correct type to materialized views ([#8864](https://github.com/dbt-labs/dbt-core/issues/8864)) -- Fix compilation exception running empty seed file and support new Integer agate data_type ([#8895](https://github.com/dbt-labs/dbt-core/issues/8895)) -- Make relation filtering None-tolerant for maximal flexibility across adapters. 
([#8974](https://github.com/dbt-labs/dbt-core/issues/8974)) -- Update run_results.json from previous versions of dbt to support deferral and rerun from failure ([#9010](https://github.com/dbt-labs/dbt-core/issues/9010)) -- Use MANIFEST.in to recursively include all jinja templates; fixes issue where some templates were not included in the distribution ([#9016](https://github.com/dbt-labs/dbt-core/issues/9016)) -- Fix git repository with subdirectory for Deps ([#9000](https://github.com/dbt-labs/dbt-core/issues/9000)) -- Use seed file from disk for unit testing if rows not specified in YAML config ([#8652](https://github.com/dbt-labs/dbt-core/issues/8652)) -- Fix formatting of tarball information in packages-lock.yml ([#9062](https://github.com/dbt-labs/dbt-core/issues/9062)) -- deps: Lock git packages to commit SHA during resolution ([#9050](https://github.com/dbt-labs/dbt-core/issues/9050)) -- deps: Use PackageRenderer to read package-lock.json ([#9127](https://github.com/dbt-labs/dbt-core/issues/9127)) -- Ensure we produce valid jsonschema schemas for manifest, catalog, run-results, and sources ([#8991](https://github.com/dbt-labs/dbt-core/issues/8991)) -- Get sources working again in dbt docs generate ([#9119](https://github.com/dbt-labs/dbt-core/issues/9119)) -- Fix parsing f-strings in python models ([#6976](https://github.com/dbt-labs/dbt-core/issues/6976)) -- Preserve the value of vars and the --full-refresh flags when using retry. 
([#9112](https://github.com/dbt-labs/dbt-core/issues/9112)) -- Support reasonably long unit test names ([#9015](https://github.com/dbt-labs/dbt-core/issues/9015)) -- Fix back-compat parsing for model-level 'tests', source table-level 'tests', and 'tests' defined on model versions ([#9411](https://github.com/dbt-labs/dbt-core/issues/9411)) -- Fix retry command run from CLI ([#9444](https://github.com/dbt-labs/dbt-core/issues/9444)) -- Fix seed and source selection in `dbt docs generate` ([#9161](https://github.com/dbt-labs/dbt-core/issues/9161)) -- Add TestGenerateCatalogWithExternalNodes, include empty nodes in node selection during docs generate ([#9456](https://github.com/dbt-labs/dbt-core/issues/9456)) -- Fix node type plurals in FoundStats log message ([#9464](https://github.com/dbt-labs/dbt-core/issues/9464)) -- Run manifest upgrade preprocessing on any older manifest version, including v11 ([#9487](https://github.com/dbt-labs/dbt-core/issues/9487)) -- Update 'compiled_code' context member logic to route based on command ('clone' or not). Reimplement 'sql' context member as wrapper of 'compiled_code'. ([#9502](https://github.com/dbt-labs/dbt-core/issues/9502)) -- Fix bug where Semantic Layer filter strings are parsed into lists. ([#9507](https://github.com/dbt-labs/dbt-core/issues/9507)) -- Initialize invocation context before test fixtures are built. 
([##9489](https://github.com/dbt-labs/dbt-core/issues/#9489)) -- When patching versioned models, set constraints after config ([#9364](https://github.com/dbt-labs/dbt-core/issues/9364)) -- only include unmodified semantic mdodels in state:modified selection ([#9548](https://github.com/dbt-labs/dbt-core/issues/9548)) -- Set query headers when manifest is passed in to dbtRunner ([#9546](https://github.com/dbt-labs/dbt-core/issues/9546)) -- Store node_info in node associated logging events ([#9557](https://github.com/dbt-labs/dbt-core/issues/9557)) -- Fix Semantic Model Compare node relations ([#9548](https://github.com/dbt-labs/dbt-core/issues/9548)) -- Clearer no-op logging in stubbed SavedQueryRunner ([#9533](https://github.com/dbt-labs/dbt-core/issues/9533)) -- Fix node_info contextvar handling so incorrect node_info doesn't persist ([#8866](https://github.com/dbt-labs/dbt-core/issues/8866)) -- Add target-path to retry ([#8948](https://github.com/dbt-labs/dbt-core/issues/8948)) - -### Docs - -- fix get_custom_database docstring ([dbt-docs/#9003](https://github.com/dbt-labs/dbt-docs/issues/9003)) - -### Under the Hood - -- Added more type annotations. 
([#8537](https://github.com/dbt-labs/dbt-core/issues/8537)) -- Add unit testing functional tests ([#8512](https://github.com/dbt-labs/dbt-core/issues/8512)) -- Remove usage of dbt.include.global_project in dbt/adapters ([#8925](https://github.com/dbt-labs/dbt-core/issues/8925)) -- Add a no-op runner for Saved Qeury ([#8893](https://github.com/dbt-labs/dbt-core/issues/8893)) -- remove dbt.flags.MP_CONTEXT usage in dbt/adapters ([#8967](https://github.com/dbt-labs/dbt-core/issues/8967)) -- Remove usage of dbt.flags.LOG_CACHE_EVENTS in dbt/adapters ([#8969](https://github.com/dbt-labs/dbt-core/issues/8969)) -- Move CatalogRelationTypes test case to the shared test suite to be reused by adapter maintainers ([#8952](https://github.com/dbt-labs/dbt-core/issues/8952)) -- Treat SystemExit as an interrupt if raised during node execution. ([#n/a](https://github.com/dbt-labs/dbt-core/issues/n/a)) -- Removing unused 'documentable' ([#8871](https://github.com/dbt-labs/dbt-core/issues/8871)) -- Remove use of dbt/core exceptions in dbt/adapter ([#8920](https://github.com/dbt-labs/dbt-core/issues/8920)) -- Cache dbt plugin modules to improve integration test performance ([#9029](https://github.com/dbt-labs/dbt-core/issues/9029)) -- Consolidate deferral methods & flags ([#7965](https://github.com/dbt-labs/dbt-core/issues/7965), [#8715](https://github.com/dbt-labs/dbt-core/issues/8715)) -- Fix test_current_timestamp_matches_utc test; allow for MacOS runner system clock variance ([#9057](https://github.com/dbt-labs/dbt-core/issues/9057)) -- Remove usage of dbt.deprecations in dbt/adapters, enable core & adapter-specific event types and protos ([#8927](https://github.com/dbt-labs/dbt-core/issues/8927), [#8918](https://github.com/dbt-labs/dbt-core/issues/8918)) -- Clean up unused adaptor folders ([#9123](https://github.com/dbt-labs/dbt-core/issues/9123)) -- Move column constraints into common/contracts, removing another dependency of adapters on core. 
([#9024](https://github.com/dbt-labs/dbt-core/issues/9024)) -- Move dbt.semver to dbt.common.semver and update references. ([#9039](https://github.com/dbt-labs/dbt-core/issues/9039)) -- Move lowercase utils method to common ([#9180](https://github.com/dbt-labs/dbt-core/issues/9180)) -- Remove usages of dbt.clients.jinja in dbt/adapters ([#9205](https://github.com/dbt-labs/dbt-core/issues/9205)) -- Remove usage of dbt.contracts in dbt/adapters ([#9208](https://github.com/dbt-labs/dbt-core/issues/9208)) -- Remove usage of dbt.contracts.graph.nodes.ResultNode in dbt/adapters ([#9214](https://github.com/dbt-labs/dbt-core/issues/9214)) -- Introduce RelationConfig Protocol, consolidate Relation.create_from ([#9215](https://github.com/dbt-labs/dbt-core/issues/9215)) -- remove manifest from adapter.set_relations_cache signature ([#9217](https://github.com/dbt-labs/dbt-core/issues/9217)) -- remove manifest from adapter catalog method signatures ([#9218](https://github.com/dbt-labs/dbt-core/issues/9218)) -- Move BaseConfig, Metadata and various other contract classes from model_config to common/contracts/config ([#8919](https://github.com/dbt-labs/dbt-core/issues/8919)) -- Add MacroResolverProtocol, remove lazy loading of manifest in adapter.execute_macro ([#9244](https://github.com/dbt-labs/dbt-core/issues/9244)) -- pass query header context to MacroQueryStringSetter ([#9249](https://github.com/dbt-labs/dbt-core/issues/9249), [#9250](https://github.com/dbt-labs/dbt-core/issues/9250)) -- add macro_context_generator on adapter ([#9247](https://github.com/dbt-labs/dbt-core/issues/9247)) -- pass mp_context to adapter factory as argument instead of import ([#9025](https://github.com/dbt-labs/dbt-core/issues/9025)) -- have dbt-postgres use RelationConfig protocol for materialized views' ([#9292](https://github.com/dbt-labs/dbt-core/issues/9292)) -- move system.py to common as dbt-bigquery relies on it to call gcloud ([#9293](https://github.com/dbt-labs/dbt-core/issues/9293)) -- 
Reorganizing event definitions to define core events in dbt/events rather than dbt/common ([#9152](https://github.com/dbt-labs/dbt-core/issues/9152)) -- move exceptions used only in dbt/common to dbt/common/exceptions ([#9332](https://github.com/dbt-labs/dbt-core/issues/9332)) -- Remove usage of dbt.adapters.factory in dbt/common ([#9334](https://github.com/dbt-labs/dbt-core/issues/9334)) -- Accept valid_error_names in WarnErrorOptions constructor, remove global usage of event modules ([#9337](https://github.com/dbt-labs/dbt-core/issues/9337)) -- Move result objects to dbt.artifacts ([#9193](https://github.com/dbt-labs/dbt-core/issues/9193)) -- dbt Labs OSS standardization of docs and templates. ([#9252](https://github.com/dbt-labs/dbt-core/issues/9252)) -- Add dbt-common as a dependency and remove dbt/common ([#9357](https://github.com/dbt-labs/dbt-core/issues/9357)) -- move cache exceptions to dbt/adapters ([#9362](https://github.com/dbt-labs/dbt-core/issues/9362)) -- Clean up macro contexts. ([#9422](https://github.com/dbt-labs/dbt-core/issues/9422)) -- Add the @requires.manifest decorator to the retry command. 
([#9426](https://github.com/dbt-labs/dbt-core/issues/9426)) -- Move WritableManifest + Documentation to dbt/artifacts ([#9378](https://github.com/dbt-labs/dbt-core/issues/9378), [#9379](https://github.com/dbt-labs/dbt-core/issues/9379)) -- Define Macro and Group resources in dbt/artifacts ([#9381](https://github.com/dbt-labs/dbt-core/issues/9381), [#9382](https://github.com/dbt-labs/dbt-core/issues/9382)) -- Move `SavedQuery` data definition to `dbt/artifacts` ([#9386](https://github.com/dbt-labs/dbt-core/issues/9386)) -- Migrate data parts of `Metric` node to dbt/artifacts ([#9383](https://github.com/dbt-labs/dbt-core/issues/9383)) -- Move data portion of `SemanticModel` to dbt/artifacts ([#9387](https://github.com/dbt-labs/dbt-core/issues/9387)) -- Move data parts of `Exposure` class to dbt/artifacts ([#9380](https://github.com/dbt-labs/dbt-core/issues/9380)) -- Start using `Mergeable` from dbt-common ([#9505](https://github.com/dbt-labs/dbt-core/issues/9505)) -- Move manifest nodes to artifacts ([#9388](https://github.com/dbt-labs/dbt-core/issues/9388)) -- Move data parts of `SourceDefinition` class to dbt/artifacts ([#9384](https://github.com/dbt-labs/dbt-core/issues/9384)) -- Remove uses of Replaceable class ([#7802](https://github.com/dbt-labs/dbt-core/issues/7802)) -- Make dbt-core compatible with Python 3.12 ([#9007](https://github.com/dbt-labs/dbt-core/issues/9007)) -- Restrict protobuf to major version 4. 
([#9566](https://github.com/dbt-labs/dbt-core/issues/9566)) -- Remove references to dbt.tracking and dbt.flags from dbt/artifacts ([#9390](https://github.com/dbt-labs/dbt-core/issues/9390)) -- Implement primary key inference for model nodes ([#9652](https://github.com/dbt-labs/dbt-core/issues/9652)) -- Define UnitTestDefinition resource in dbt/artifacts/resources ([#9667](https://github.com/dbt-labs/dbt-core/issues/9667)) -- Use Manifest instead of WritableManifest in PreviousState and _get_deferred_manifest ([#9567](https://github.com/dbt-labs/dbt-core/issues/9567)) - -### Dependencies - -- Bump actions/checkout from 3 to 4 ([#8781](https://github.com/dbt-labs/dbt-core/pull/8781)) -- Begin using DSI 0.4.x ([#8892](https://github.com/dbt-labs/dbt-core/pull/8892)) -- Update typing-extensions version to >=4.4 ([#9012](https://github.com/dbt-labs/dbt-core/pull/9012)) -- Bump ddtrace from 2.1.7 to 2.3.0 ([#9132](https://github.com/dbt-labs/dbt-core/pull/9132)) -- Bump freezegun from 0.3.12 to 1.3.0 ([#9197](https://github.com/dbt-labs/dbt-core/pull/9197)) -- Bump actions/setup-python from 4 to 5 ([#9267](https://github.com/dbt-labs/dbt-core/pull/9267)) -- Bump actions/download-artifact from 3 to 4 ([#9374](https://github.com/dbt-labs/dbt-core/pull/9374)) -- remove dbt/adapters and add dependency on dbt-adapters ([#9430](https://github.com/dbt-labs/dbt-core/pull/9430)) -- Bump actions/cache from 3 to 4 ([#9471](https://github.com/dbt-labs/dbt-core/pull/9471)) -- Bump peter-evans/create-pull-request from 5 to 6 ([#9552](https://github.com/dbt-labs/dbt-core/pull/9552)) -- Cap dbt-semantic-interfaces version range to <0.6 ([#9671](https://github.com/dbt-labs/dbt-core/pull/9671)) -- bump dbt-common to accept major version 1 ([#9690](https://github.com/dbt-labs/dbt-core/pull/9690)) - -### Security - -- Update Jinja2 to >= 3.1.3 to address CVE-2024-22195 ([#CVE-2024-22195](https://github.com/dbt-labs/dbt-core/pull/CVE-2024-22195)) - -### Contributors -- 
[@LeoTheGriff](https://github.com/LeoTheGriff) ([#9003](https://github.com/dbt-labs/dbt-core/issues/9003)) -- [@WilliamDee](https://github.com/WilliamDee) ([#9203](https://github.com/dbt-labs/dbt-core/issues/9203)) -- [@adamlopez](https://github.com/adamlopez) ([#8621](https://github.com/dbt-labs/dbt-core/issues/8621)) -- [@aliceliu](https://github.com/aliceliu) ([#9652](https://github.com/dbt-labs/dbt-core/issues/9652)) -- [@benmosher](https://github.com/benmosher) ([#n/a](https://github.com/dbt-labs/dbt-core/issues/n/a)) -- [@colin-rorgers-dbt](https://github.com/colin-rorgers-dbt) ([#8919](https://github.com/dbt-labs/dbt-core/issues/8919)) -- [@courtneyholcomb](https://github.com/courtneyholcomb) ([#9507](https://github.com/dbt-labs/dbt-core/issues/9507)) -- [@l1xnan](https://github.com/l1xnan) ([#9007](https://github.com/dbt-labs/dbt-core/issues/9007)) -- [@mederka](https://github.com/mederka) ([#6976](https://github.com/dbt-labs/dbt-core/issues/6976)) -- [@ofek1weiss](https://github.com/ofek1weiss) ([#5609](https://github.com/dbt-labs/dbt-core/issues/5609)) -- [@peterallenwebb,](https://github.com/peterallenwebb,) ([#9112](https://github.com/dbt-labs/dbt-core/issues/9112)) -- [@tlento](https://github.com/tlento) ([#9012](https://github.com/dbt-labs/dbt-core/pull/9012), [#9671](https://github.com/dbt-labs/dbt-core/pull/9671)) -- [@tonayya](https://github.com/tonayya) ([#9252](https://github.com/dbt-labs/dbt-core/issues/9252)) - ## Previous Releases For information on prior major and minor releases, see their changelogs: +* [1.8](https://github.com/dbt-labs/dbt-core/blob/1.8.latest/CHANGELOG.md) * [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md) * [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md) * [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 898594e4860..7bc8520e4fd 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -25,7 +25,7 @@ 
If you get stuck, we're happy to help! Drop us a line in the `#dbt-core-developm - **Adapters:** Is your issue or proposed code change related to a specific [database adapter](https://docs.getdbt.com/docs/available-adapters)? If so, please open issues, PRs, and discussions in that adapter's repository instead. - **CLA:** Please note that anyone contributing code to `dbt-core` must sign the [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements). If you are unable to sign the CLA, the `dbt-core` maintainers will unfortunately be unable to merge any of your Pull Requests. We welcome you to participate in discussions, open issues, and comment on existing ones. - **Branches:** All pull requests from community contributors should target the `main` branch (default). If the change is needed as a patch for a minor version of dbt that has already been released (or is already a release candidate), a maintainer will backport the changes in your PR to the relevant "latest" release branch (`1.0.latest`, `1.1.latest`, ...). If an issue fix applies to a release branch, that fix should be first committed to the development branch and then to the release branch (rarely release-branch fixes may not apply to `main`). -- **Releases**: Before releasing a new minor version of Core, we prepare a series of alphas and release candidates to allow users (especially employees of dbt Labs!) to test the new version in live environments. This is an important quality assurance step, as it exposes the new code to a wide variety of complicated deployments and can surface bugs before official release. Releases are accessible via pip, homebrew, and dbt Cloud. +- **Releases**: Before releasing a new minor version of Core, we prepare a series of alphas and release candidates to allow users (especially employees of dbt Labs!) to test the new version in live environments. 
This is an important quality assurance step, as it exposes the new code to a wide variety of complicated deployments and can surface bugs before official release. Releases are accessible via our [supported installation methods](https://docs.getdbt.com/docs/core/installation-overview#install-dbt-core). ## Getting the code diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 00000000000..b856030a1f6 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1 @@ +[About dbt Core versions](https://docs.getdbt.com/docs/dbt-versions/core) diff --git a/core/dbt/README.md b/core/dbt/README.md index 79123a95f47..6b545ceb888 100644 --- a/core/dbt/README.md +++ b/core/dbt/README.md @@ -22,8 +22,6 @@ ### links.py -### logger.py - ### main.py ### node_types.py diff --git a/core/dbt/artifacts/README.md b/core/dbt/artifacts/README.md index ebf28742fd4..0cb2ea96e25 100644 --- a/core/dbt/artifacts/README.md +++ b/core/dbt/artifacts/README.md @@ -1,24 +1,23 @@ # dbt/artifacts ## Overview -This directory is meant to be a lightweight module that is independent (and upstream of) the rest of dbt-core internals. +This directory is meant to be a lightweight module that is independent (and upstream of) the rest of `dbt-core` internals. -It's primary responsibility is to define simple data classes that represent the versioned artifact schemas that dbt writes as JSON files throughout execution. +Its primary responsibility is to define simple data classes that represent the versioned artifact schemas that dbt writes as JSON files throughout execution. -Long term, this module may be released as a standalone package (e.g. dbt-artifacts) to support stable parsing dbt artifacts programmatically. +Eventually, this module may be released as a standalone package (e.g. `dbt-artifacts`) to support stable programmatic parsing of dbt artifacts. -`dbt/artifacts` is organized into artifact 'schemas' and 'resources'. 
Schemas represent the final serialized artifact object, while resources represent sub-components of the larger artifact schemas. +`dbt/artifacts` is organized into artifact 'schemas' and 'resources'. Schemas represent the final serialized artifact objects, while resources represent smaller components within those schemas. ### dbt/artifacts/schemas - -Each major version of a schema under `dbt/artifacts/schema` is defined in its corresponding `dbt/artifacts/schema/<artifact>/v<version>` directory. Before `dbt/artifacts` artifact schemas were always modified in-place, which is why artifacts are missing class definitions for historical versions. +Each major version of a schema under `dbt/artifacts/schema` is defined in its corresponding `dbt/artifacts/schema/<artifact>/v<version>` directory. Before `dbt/artifacts` artifact schemas were always modified in-place, which is why class definitions for older artifact versions are missing. Currently, there are four artifact schemas defined in `dbt/artifact/schemas`: | Artifact name | File | Class | Latest definition | |---------------|------------------|----------------------------------|-----------------------------------| -| manifest | manifest.json | WritableManifest | dbt/artifacts/schema/manifest/v11 | +| manifest | manifest.json | WritableManifest | dbt/artifacts/schema/manifest/v12 | | catalog | catalog.json | CatalogArtifact | dbt/artifacts/schema/catalog/v1 | | run | run_results.json | RunResultsArtifact | dbt/artifacts/schema/run/v5 | | freshness | sources.json | FreshnessExecutionResultArtifact | dbt/artifacts/schema/freshness/v3 | @@ -32,27 +31,40 @@ All existing resources are defined under `dbt/artifacts/resources/v1`. ### Non-breaking changes -Freely make incremental, non-breaking changes in-place to the latest major version of any artifact in mantle (via minor or patch bumps). 
The only changes that are fully forward and backward compatible are: +Freely make incremental, non-breaking changes in-place to the latest major version of any artifact (minor or patch bumps). The only changes that are fully forward and backward compatible are: * Adding a new field with a default -* Deleting an __optional__ field +* Deleting a field with a default. This is compatible in terms of serialization and deserialization, but still may lead to surprising behaviour: + * For artifact consumers relying on the field's existence (e.g. `manifest["deleted_field"]` will stop working unless the access was implemented safely) + * Old code (e.g. in dbt-core) that relies on the value of the deleted field may have surprising behaviour given only the default value will be set when instantiated from the new schema + +These types of minor, non-breaking changes are tested by [tests/unit/artifacts/test_base_resource.py::TestMinorSchemaChange](https://github.com/dbt-labs/dbt-core/blob/main/tests/unit/artifacts/test_base_resource.py). + + +#### Updating [schemas.getdbt.com](https://schemas.getdbt.com) +Non-breaking changes to artifact schemas require an update to the corresponding jsonschemas published to [schemas.getdbt.com](https://schemas.getdbt.com), which are defined in https://github.com/dbt-labs/schemas.getdbt.com. To do so: +1. Create a PR in https://github.com/dbt-labs/schemas.getdbt.com which reflects the schema changes to the artifact. The schema can be updated in-place for non-breaking changes. Example PR: https://github.com/dbt-labs/schemas.getdbt.com/pull/39 +2. Merge the https://github.com/dbt-labs/schemas.getdbt.com PR +3. Observe the `Artifact Schema Check` CI check pass on the `dbt-core` PR that updates the artifact schemas, and merge the `dbt-core` PR! 
+ +Note: Although `jsonschema` validation using the schemas in [schemas.getdbt.com](https://schemas.getdbt.com) is not encouraged or formally supported, `jsonschema` validation should still continue to work once the schemas are updated because they are forward-compatible and can therefore be used to validate previous minor versions of the schema. ### Breaking changes -A breaking change is anything that: +A breaking change is anything that: * Deletes a required field * Changes the name or type of an existing field -Removes default from a field +Removes the default value of an existing field -These should generally be avoided, and bundled together to aim for as minimal disruption across the integration ecosystem as possible. +These should be avoided whenever possible. When necessary, multiple breaking changes should be bundled together, to aim for minimal disruption across the ecosystem of tools that leverage dbt metadata. -However, when it comes time to make one (or more) of these, a new versioned artifact should be created as follows: +When it comes time to make breaking changes, a new versioned artifact should be created as follows: 1. Create a new version directory and file that defines the new artifact schema under `dbt/artifacts/schemas/<artifact>/v<version>/<artifact>.py` 2. If any resources are having breaking changes introduced, create a new resource class that defines the new resource schema under `dbt/artifacts/resources/v<version>/<resource>.py` 3. Implement upgrade paths on the new versioned artifact class so it can be constructed given a dictionary representation of any previous version of the same artifact - * TODO: update once the design is finalized + * TODO: link example once available 4. Implement downgrade paths on all previous versions of the artifact class so they can still be constructed given a dictionary representation of the new artifact schema - * TODO: update once the design is finalized + * TODO: link example once available 5. 
Update the 'latest' aliases to point to the new version of the artifact and/or resource: * Artifact: `dbt/artifacts/schemas/<artifact>/__init__.py ` * Resource: `dbt/artifacts/resources/__init__.py ` - Downstream consumers (e.g. dbt-core) importing from the latest alias are susceptible to breaking changes. Ideally, any incompatibilities should be caught my static type checking in those systems. However, it is always possible for consumers to pin imports to previous versions via `dbt.artifacts.schemas.<artifact>.v<version>` +Downstream consumers (e.g. `dbt-core`) importing from the latest alias are susceptible to breaking changes. Ideally, any incompatibilities should be caught by static type checking in those systems. However, it is always possible for consumers to pin imports to previous versions via `dbt.artifacts.schemas.<artifact>.v<version>`. diff --git a/core/dbt/artifacts/resources/__init__.py b/core/dbt/artifacts/resources/__init__.py index d937828e6d5..5d456dd6ef9 100644 --- a/core/dbt/artifacts/resources/__init__.py +++ b/core/dbt/artifacts/resources/__init__.py @@ -1,32 +1,29 @@ -from dbt.artifacts.resources.base import BaseResource, GraphResource, FileHash, Docs +from dbt.artifacts.resources.base import BaseResource, Docs, FileHash, GraphResource +from dbt.artifacts.resources.v1.analysis import Analysis # alias to latest resource definitions from dbt.artifacts.resources.v1.components import ( - DependsOn, - NodeVersion, - RefArgs, - HasRelationMetadata, - ParsedResourceMandatory, - ParsedResource, ColumnInfo, CompiledResource, - InjectedCTE, Contract, DeferRelation, + DependsOn, FreshnessThreshold, + HasRelationMetadata, + InjectedCTE, + NodeVersion, + ParsedResource, + ParsedResourceMandatory, Quoting, + RefArgs, Time, ) -from dbt.artifacts.resources.v1.analysis import Analysis -from dbt.artifacts.resources.v1.hook import HookNode -from dbt.artifacts.resources.v1.model import Model, ModelConfig -from dbt.artifacts.resources.v1.sql_operation import SqlOperation -from dbt.artifacts.resources.v1.seed 
import Seed, SeedConfig -from dbt.artifacts.resources.v1.singular_test import SingularTest -from dbt.artifacts.resources.v1.generic_test import GenericTest, TestMetadata -from dbt.artifacts.resources.v1.snapshot import Snapshot, SnapshotConfig - - +from dbt.artifacts.resources.v1.config import ( + Hook, + NodeAndTestConfig, + NodeConfig, + TestConfig, +) from dbt.artifacts.resources.v1.documentation import Documentation from dbt.artifacts.resources.v1.exposure import ( Exposure, @@ -34,8 +31,10 @@ ExposureType, MaturityType, ) -from dbt.artifacts.resources.v1.macro import Macro, MacroDependsOn, MacroArgument +from dbt.artifacts.resources.v1.generic_test import GenericTest, TestMetadata from dbt.artifacts.resources.v1.group import Group +from dbt.artifacts.resources.v1.hook import HookNode +from dbt.artifacts.resources.v1.macro import Macro, MacroArgument, MacroDependsOn from dbt.artifacts.resources.v1.metric import ( ConstantPropertyInput, ConversionTypeParams, @@ -46,6 +45,7 @@ MetricTimeWindow, MetricTypeParams, ) +from dbt.artifacts.resources.v1.model import Model, ModelConfig from dbt.artifacts.resources.v1.owner import Owner from dbt.artifacts.resources.v1.saved_query import ( Export, @@ -55,6 +55,7 @@ SavedQueryConfig, SavedQueryMandatory, ) +from dbt.artifacts.resources.v1.seed import Seed, SeedConfig from dbt.artifacts.resources.v1.semantic_layer_components import ( FileSlice, SourceFileMetadata, @@ -74,28 +75,22 @@ SemanticModel, SemanticModelConfig, ) - -from dbt.artifacts.resources.v1.config import ( - NodeAndTestConfig, - NodeConfig, - TestConfig, - Hook, -) - +from dbt.artifacts.resources.v1.singular_test import SingularTest +from dbt.artifacts.resources.v1.snapshot import Snapshot, SnapshotConfig from dbt.artifacts.resources.v1.source_definition import ( - SourceConfig, ExternalPartition, ExternalTable, - SourceDefinition, ParsedSourceMandatory, + SourceConfig, + SourceDefinition, ) - +from dbt.artifacts.resources.v1.sql_operation import SqlOperation 
from dbt.artifacts.resources.v1.unit_test_definition import ( UnitTestConfig, UnitTestDefinition, + UnitTestFormat, UnitTestInputFixture, + UnitTestNodeVersions, UnitTestOutputFixture, UnitTestOverrides, - UnitTestNodeVersions, - UnitTestFormat, ) diff --git a/core/dbt/artifacts/resources/base.py b/core/dbt/artifacts/resources/base.py index dd66aa97d72..0911a997c19 100644 --- a/core/dbt/artifacts/resources/base.py +++ b/core/dbt/artifacts/resources/base.py @@ -1,9 +1,9 @@ +import hashlib from dataclasses import dataclass -from dbt_common.dataclass_schema import dbtClassMixin from typing import List, Optional -import hashlib from dbt.artifacts.resources.types import NodeType +from dbt_common.dataclass_schema import dbtClassMixin @dataclass diff --git a/core/dbt/artifacts/resources/v1/analysis.py b/core/dbt/artifacts/resources/v1/analysis.py index 60f90e61576..325db979c61 100644 --- a/core/dbt/artifacts/resources/v1/analysis.py +++ b/core/dbt/artifacts/resources/v1/analysis.py @@ -1,7 +1,8 @@ -from dbt.artifacts.resources.v1.components import CompiledResource -from typing import Literal from dataclasses import dataclass +from typing import Literal + from dbt.artifacts.resources.types import NodeType +from dbt.artifacts.resources.v1.components import CompiledResource @dataclass diff --git a/core/dbt/artifacts/resources/v1/components.py b/core/dbt/artifacts/resources/v1/components.py index 27509fb6072..6e6605c18ab 100644 --- a/core/dbt/artifacts/resources/v1/components.py +++ b/core/dbt/artifacts/resources/v1/components.py @@ -1,15 +1,15 @@ import time from dataclasses import dataclass, field -from dbt.artifacts.resources.base import GraphResource, FileHash, Docs +from datetime import timedelta +from typing import Any, Dict, List, Optional, Union + +from dbt.artifacts.resources.base import Docs, FileHash, GraphResource +from dbt.artifacts.resources.types import NodeType, TimePeriod from dbt.artifacts.resources.v1.config import NodeConfig -from 
dbt_common.dataclass_schema import dbtClassMixin, ExtensibleDbtClassMixin from dbt_common.contracts.config.properties import AdditionalPropertiesMixin from dbt_common.contracts.constraints import ColumnLevelConstraint -from typing import Dict, List, Optional, Union, Any -from datetime import timedelta -from dbt.artifacts.resources.types import TimePeriod from dbt_common.contracts.util import Mergeable - +from dbt_common.dataclass_schema import ExtensibleDbtClassMixin, dbtClassMixin NodeVersion = Union[str, float] @@ -154,6 +154,14 @@ def quoting_dict(self) -> Dict[str, bool]: class DeferRelation(HasRelationMetadata): alias: str relation_name: Optional[str] + # The rest of these fields match RelationConfig protocol exactly + resource_type: NodeType + name: str + description: str + compiled_code: Optional[str] + meta: Dict[str, Any] + tags: List[str] + config: Optional[NodeConfig] @property def identifier(self): @@ -181,13 +189,18 @@ class ParsedResource(ParsedResourceMandatory): docs: Docs = field(default_factory=Docs) patch_path: Optional[str] = None build_path: Optional[str] = None - deferred: bool = False unrendered_config: Dict[str, Any] = field(default_factory=dict) created_at: float = field(default_factory=lambda: time.time()) config_call_dict: Dict[str, Any] = field(default_factory=dict) relation_name: Optional[str] = None raw_code: str = "" + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) + if context and context.get("artifact") and "config_call_dict" in dct: + del dct["config_call_dict"] + return dct + @dataclass class CompiledResource(ParsedResource): @@ -207,8 +220,8 @@ class CompiledResource(ParsedResource): _pre_injected_sql: Optional[str] = None contract: Contract = field(default_factory=Contract) - def __post_serialize__(self, dct): - dct = super().__post_serialize__(dct) + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = 
super().__post_serialize__(dct, context) if "_pre_injected_sql" in dct: del dct["_pre_injected_sql"] # Remove compiled attributes diff --git a/core/dbt/artifacts/resources/v1/config.py b/core/dbt/artifacts/resources/v1/config.py index 7a3ef63c334..fc9be8f7c70 100644 --- a/core/dbt/artifacts/resources/v1/config.py +++ b/core/dbt/artifacts/resources/v1/config.py @@ -1,19 +1,18 @@ -from dbt_common.dataclass_schema import dbtClassMixin, ValidationError -from typing import Optional, List, Any, Dict, Union -from typing_extensions import Annotated +import re from dataclasses import dataclass, field -from dbt_common.contracts.config.base import ( - BaseConfig, - CompareBehavior, - MergeBehavior, -) -from dbt_common.contracts.config.metadata import Metadata, ShowBehavior -from dbt_common.contracts.config.materialization import OnConfigurationChangeOption +from typing import Any, Dict, List, Optional, Union + +from mashumaro.jsonschema.annotations import Pattern +from typing_extensions import Annotated + +from dbt import hooks from dbt.artifacts.resources.base import Docs from dbt.artifacts.resources.types import ModelHookType from dbt.artifacts.utils.validation import validate_color -from dbt import hooks -from mashumaro.jsonschema.annotations import Pattern +from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior +from dbt_common.contracts.config.materialization import OnConfigurationChangeOption +from dbt_common.contracts.config.metadata import Metadata, ShowBehavior +from dbt_common.dataclass_schema import ValidationError, dbtClassMixin def list_str() -> List[str]: @@ -250,6 +249,12 @@ def same_contents(cls, unrendered: Dict[str, Any], other: Dict[str, Any]) -> boo @classmethod def validate(cls, data): + if data.get("severity") and not re.match(SEVERITY_PATTERN, data.get("severity")): + raise ValidationError( + f"Severity must be either 'warn' or 'error'. 
Got '{data.get('severity')}'" + ) + super().validate(data) + if data.get("materialized") and data.get("materialized") != "test": raise ValidationError("A test must have a materialized value of 'test'") diff --git a/core/dbt/artifacts/resources/v1/exposure.py b/core/dbt/artifacts/resources/v1/exposure.py index 1460c110d43..00f3c8b89e1 100644 --- a/core/dbt/artifacts/resources/v1/exposure.py +++ b/core/dbt/artifacts/resources/v1/exposure.py @@ -1,12 +1,13 @@ +import time from dataclasses import dataclass, field +from typing import Any, Dict, List, Literal, Optional + from dbt.artifacts.resources.base import GraphResource from dbt.artifacts.resources.types import NodeType from dbt.artifacts.resources.v1.components import DependsOn, RefArgs from dbt.artifacts.resources.v1.owner import Owner from dbt_common.contracts.config.base import BaseConfig from dbt_common.dataclass_schema import StrEnum -import time -from typing import Any, Dict, List, Literal, Optional class ExposureType(StrEnum): diff --git a/core/dbt/artifacts/resources/v1/generic_test.py b/core/dbt/artifacts/resources/v1/generic_test.py index b24be584b3a..504dbb07940 100644 --- a/core/dbt/artifacts/resources/v1/generic_test.py +++ b/core/dbt/artifacts/resources/v1/generic_test.py @@ -1,9 +1,10 @@ from dataclasses import dataclass, field -from typing import Optional, Any, Dict, Literal -from dbt_common.dataclass_schema import dbtClassMixin +from typing import Any, Dict, Literal, Optional + from dbt.artifacts.resources.types import NodeType -from dbt.artifacts.resources.v1.config import TestConfig from dbt.artifacts.resources.v1.components import CompiledResource +from dbt.artifacts.resources.v1.config import TestConfig +from dbt_common.dataclass_schema import dbtClassMixin @dataclass diff --git a/core/dbt/artifacts/resources/v1/hook.py b/core/dbt/artifacts/resources/v1/hook.py index dcfb4684c68..93fe314518e 100644 --- a/core/dbt/artifacts/resources/v1/hook.py +++ b/core/dbt/artifacts/resources/v1/hook.py @@ 
-1,7 +1,8 @@ from dataclasses import dataclass -from typing import Optional, Literal -from dbt.artifacts.resources.v1.components import CompiledResource +from typing import Literal, Optional + from dbt.artifacts.resources.types import NodeType +from dbt.artifacts.resources.v1.components import CompiledResource @dataclass diff --git a/core/dbt/artifacts/resources/v1/macro.py b/core/dbt/artifacts/resources/v1/macro.py index be02d529ee1..c5154a9a6d4 100644 --- a/core/dbt/artifacts/resources/v1/macro.py +++ b/core/dbt/artifacts/resources/v1/macro.py @@ -1,11 +1,11 @@ -from dataclasses import dataclass, field import time -from typing import Literal, List, Dict, Optional, Any +from dataclasses import dataclass, field +from typing import Any, Dict, List, Literal, Optional -from dbt_common.dataclass_schema import dbtClassMixin from dbt.artifacts.resources.base import BaseResource, Docs -from dbt.artifacts.resources.types import NodeType, ModelLanguage +from dbt.artifacts.resources.types import ModelLanguage, NodeType from dbt.artifacts.resources.v1.components import MacroDependsOn +from dbt_common.dataclass_schema import dbtClassMixin @dataclass diff --git a/core/dbt/artifacts/resources/v1/metric.py b/core/dbt/artifacts/resources/v1/metric.py index 84b484b5484..4f4dcfb98c9 100644 --- a/core/dbt/artifacts/resources/v1/metric.py +++ b/core/dbt/artifacts/resources/v1/metric.py @@ -1,6 +1,14 @@ import time - from dataclasses import dataclass, field +from typing import Any, Dict, List, Literal, Optional + +from dbt_semantic_interfaces.references import MeasureReference, MetricReference +from dbt_semantic_interfaces.type_enums import ( + ConversionCalculationType, + MetricType, + TimeGranularity, +) + from dbt.artifacts.resources.base import GraphResource from dbt.artifacts.resources.types import NodeType from dbt.artifacts.resources.v1.components import DependsOn, RefArgs @@ -10,14 +18,6 @@ ) from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, 
MergeBehavior from dbt_common.dataclass_schema import dbtClassMixin -from dbt_semantic_interfaces.references import MeasureReference, MetricReference -from dbt_semantic_interfaces.type_enums import ( - ConversionCalculationType, - MetricType, - TimeGranularity, -) -from typing import Any, Dict, List, Literal, Optional - """ The following classes are dataclasses which are used to construct the Metric diff --git a/core/dbt/artifacts/resources/v1/model.py b/core/dbt/artifacts/resources/v1/model.py index afb5edaad54..821d04f147f 100644 --- a/core/dbt/artifacts/resources/v1/model.py +++ b/core/dbt/artifacts/resources/v1/model.py @@ -1,11 +1,16 @@ from dataclasses import dataclass, field -from typing import Literal, Optional, List from datetime import datetime +from typing import Dict, List, Literal, Optional + +from dbt.artifacts.resources.types import AccessType, NodeType +from dbt.artifacts.resources.v1.components import ( + CompiledResource, + DeferRelation, + NodeVersion, +) +from dbt.artifacts.resources.v1.config import NodeConfig from dbt_common.contracts.config.base import MergeBehavior from dbt_common.contracts.constraints import ModelLevelConstraint -from dbt.artifacts.resources.v1.config import NodeConfig -from dbt.artifacts.resources.types import AccessType, NodeType -from dbt.artifacts.resources.v1.components import DeferRelation, NodeVersion, CompiledResource @dataclass @@ -26,3 +31,10 @@ class Model(CompiledResource): latest_version: Optional[NodeVersion] = None deprecation_date: Optional[datetime] = None defer_relation: Optional[DeferRelation] = None + primary_key: List[str] = field(default_factory=list) + + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) + if context and context.get("artifact") and "defer_relation" in dct: + del dct["defer_relation"] + return dct diff --git a/core/dbt/artifacts/resources/v1/saved_query.py b/core/dbt/artifacts/resources/v1/saved_query.py index 
5f0575d26a7..8b024682a0f 100644 --- a/core/dbt/artifacts/resources/v1/saved_query.py +++ b/core/dbt/artifacts/resources/v1/saved_query.py @@ -1,8 +1,15 @@ from __future__ import annotations -import time +import time from dataclasses import dataclass, field +from typing import Any, Dict, List, Literal, Optional + +from dbt_semantic_interfaces.type_enums.export_destination_type import ( + ExportDestinationType, +) + from dbt.artifacts.resources.base import GraphResource +from dbt.artifacts.resources.types import NodeType from dbt.artifacts.resources.v1.components import DependsOn, RefArgs from dbt.artifacts.resources.v1.semantic_layer_components import ( SourceFileMetadata, @@ -10,8 +17,6 @@ ) from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior from dbt_common.dataclass_schema import dbtClassMixin -from dbt_semantic_interfaces.type_enums.export_destination_type import ExportDestinationType -from typing import Any, Dict, List, Optional @dataclass @@ -78,6 +83,7 @@ class SavedQueryMandatory(GraphResource): @dataclass class SavedQuery(SavedQueryMandatory): + resource_type: Literal[NodeType.SavedQuery] description: Optional[str] = None label: Optional[str] = None metadata: Optional[SourceFileMetadata] = None diff --git a/core/dbt/artifacts/resources/v1/seed.py b/core/dbt/artifacts/resources/v1/seed.py index 47a16352cf2..5328488b3c5 100644 --- a/core/dbt/artifacts/resources/v1/seed.py +++ b/core/dbt/artifacts/resources/v1/seed.py @@ -1,9 +1,14 @@ from dataclasses import dataclass, field -from typing import Optional, Literal -from dbt_common.dataclass_schema import ValidationError +from typing import Dict, Literal, Optional + from dbt.artifacts.resources.types import NodeType -from dbt.artifacts.resources.v1.components import MacroDependsOn, DeferRelation, ParsedResource +from dbt.artifacts.resources.v1.components import ( + DeferRelation, + MacroDependsOn, + ParsedResource, +) from dbt.artifacts.resources.v1.config import NodeConfig 
+from dbt_common.dataclass_schema import ValidationError @dataclass @@ -28,3 +33,9 @@ class Seed(ParsedResource): # No SQLDefaults! root_path: Optional[str] = None depends_on: MacroDependsOn = field(default_factory=MacroDependsOn) defer_relation: Optional[DeferRelation] = None + + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) + if context and context.get("artifact") and "defer_relation" in dct: + del dct["defer_relation"] + return dct diff --git a/core/dbt/artifacts/resources/v1/semantic_layer_components.py b/core/dbt/artifacts/resources/v1/semantic_layer_components.py index 540317d0889..2288ea3e6ca 100644 --- a/core/dbt/artifacts/resources/v1/semantic_layer_components.py +++ b/core/dbt/artifacts/resources/v1/semantic_layer_components.py @@ -1,9 +1,13 @@ from dataclasses import dataclass -from dbt_common.dataclass_schema import dbtClassMixin -from dbt_semantic_interfaces.call_parameter_sets import FilterCallParameterSets -from dbt_semantic_interfaces.parsing.where_filter.where_filter_parser import WhereFilterParser from typing import List, Sequence, Tuple +from dbt_semantic_interfaces.call_parameter_sets import FilterCallParameterSets +from dbt_semantic_interfaces.parsing.where_filter.where_filter_parser import ( + WhereFilterParser, +) + +from dbt_common.dataclass_schema import dbtClassMixin + @dataclass class WhereFilter(dbtClassMixin): diff --git a/core/dbt/artifacts/resources/v1/semantic_model.py b/core/dbt/artifacts/resources/v1/semantic_model.py index 8a02aa5fa61..6abe48bb3da 100644 --- a/core/dbt/artifacts/resources/v1/semantic_model.py +++ b/core/dbt/artifacts/resources/v1/semantic_model.py @@ -1,10 +1,7 @@ import time - from dataclasses import dataclass, field -from dbt.artifacts.resources.base import GraphResource -from dbt.artifacts.resources.v1.components import DependsOn, RefArgs -from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior -from 
dbt_common.dataclass_schema import dbtClassMixin +from typing import Any, Dict, List, Optional, Sequence + from dbt_semantic_interfaces.references import ( DimensionReference, EntityReference, @@ -19,9 +16,12 @@ EntityType, TimeGranularity, ) -from dbt.artifacts.resources import SourceFileMetadata -from typing import Any, Dict, List, Optional, Sequence +from dbt.artifacts.resources import SourceFileMetadata +from dbt.artifacts.resources.base import GraphResource +from dbt.artifacts.resources.v1.components import DependsOn, RefArgs +from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior +from dbt_common.dataclass_schema import dbtClassMixin """ The classes in this file are dataclasses which are used to construct the Semantic diff --git a/core/dbt/artifacts/resources/v1/singular_test.py b/core/dbt/artifacts/resources/v1/singular_test.py index 76b47183c51..3b025d79e11 100644 --- a/core/dbt/artifacts/resources/v1/singular_test.py +++ b/core/dbt/artifacts/resources/v1/singular_test.py @@ -1,5 +1,6 @@ from dataclasses import dataclass, field from typing import Literal + from dbt.artifacts.resources.types import NodeType from dbt.artifacts.resources.v1.components import CompiledResource from dbt.artifacts.resources.v1.config import TestConfig diff --git a/core/dbt/artifacts/resources/v1/snapshot.py b/core/dbt/artifacts/resources/v1/snapshot.py index 4c9cbea27c5..732e9a96b53 100644 --- a/core/dbt/artifacts/resources/v1/snapshot.py +++ b/core/dbt/artifacts/resources/v1/snapshot.py @@ -1,9 +1,10 @@ -from typing import Union, List, Optional, Literal from dataclasses import dataclass -from dbt_common.dataclass_schema import ValidationError +from typing import Dict, List, Literal, Optional, Union + from dbt.artifacts.resources.types import NodeType from dbt.artifacts.resources.v1.components import CompiledResource, DeferRelation from dbt.artifacts.resources.v1.config import NodeConfig +from dbt_common.dataclass_schema import ValidationError 
@dataclass @@ -64,3 +65,9 @@ class Snapshot(CompiledResource): resource_type: Literal[NodeType.Snapshot] config: SnapshotConfig defer_relation: Optional[DeferRelation] = None + + def __post_serialize__(self, dct, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) + if context and context.get("artifact") and "defer_relation" in dct: + del dct["defer_relation"] + return dct diff --git a/core/dbt/artifacts/resources/v1/source_definition.py b/core/dbt/artifacts/resources/v1/source_definition.py index e5a9ab1d98e..ac0fcfca1b2 100644 --- a/core/dbt/artifacts/resources/v1/source_definition.py +++ b/core/dbt/artifacts/resources/v1/source_definition.py @@ -1,6 +1,7 @@ import time - from dataclasses import dataclass, field +from typing import Any, Dict, List, Literal, Optional, Union + from dbt.artifacts.resources.base import GraphResource from dbt.artifacts.resources.types import NodeType from dbt.artifacts.resources.v1.components import ( @@ -13,7 +14,6 @@ from dbt_common.contracts.config.properties import AdditionalPropertiesAllowed from dbt_common.contracts.util import Mergeable from dbt_common.exceptions import CompilationError -from typing import Any, Dict, List, Literal, Optional, Union @dataclass diff --git a/core/dbt/artifacts/resources/v1/sql_operation.py b/core/dbt/artifacts/resources/v1/sql_operation.py index fd8e79b21a1..f669471f1dd 100644 --- a/core/dbt/artifacts/resources/v1/sql_operation.py +++ b/core/dbt/artifacts/resources/v1/sql_operation.py @@ -1,5 +1,6 @@ from dataclasses import dataclass from typing import Literal + from dbt.artifacts.resources.types import NodeType from dbt.artifacts.resources.v1.components import CompiledResource diff --git a/core/dbt/artifacts/resources/v1/unit_test_definition.py b/core/dbt/artifacts/resources/v1/unit_test_definition.py index fc265fa36b9..5c18538a733 100644 --- a/core/dbt/artifacts/resources/v1/unit_test_definition.py +++ b/core/dbt/artifacts/resources/v1/unit_test_definition.py @@ 
-1,18 +1,13 @@ -from dataclasses import dataclass, field import time -from typing import Optional, Sequence, Dict, Any, List, Union - -from dbt_common.contracts.config.base import ( - BaseConfig, - CompareBehavior, - MergeBehavior, -) -from dbt_common.contracts.config.metadata import ShowBehavior -from dbt_common.dataclass_schema import dbtClassMixin, StrEnum +from dataclasses import dataclass, field +from typing import Any, Dict, List, Optional, Sequence, Union -from dbt.artifacts.resources.v1.config import metas, list_str +from dbt.artifacts.resources import DependsOn, NodeVersion from dbt.artifacts.resources.base import GraphResource -from dbt.artifacts.resources import NodeVersion, DependsOn +from dbt.artifacts.resources.v1.config import list_str, metas +from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior +from dbt_common.contracts.config.metadata import ShowBehavior +from dbt_common.dataclass_schema import StrEnum, dbtClassMixin @dataclass diff --git a/core/dbt/artifacts/schemas/base.py b/core/dbt/artifacts/schemas/base.py index d7c206a218e..c807257a24b 100644 --- a/core/dbt/artifacts/schemas/base.py +++ b/core/dbt/artifacts/schemas/base.py @@ -1,19 +1,18 @@ import dataclasses -from datetime import datetime import functools +from datetime import datetime +from typing import Any, ClassVar, Dict, Optional, Type, TypeVar + from mashumaro.jsonschema import build_json_schema from mashumaro.jsonschema.dialects import DRAFT_2020_12 -from typing import ClassVar, Type, TypeVar, Dict, Any, Optional -from dbt_common.clients.system import write_json, read_json -from dbt_common.exceptions import DbtInternalError, DbtRuntimeError +from dbt.artifacts.exceptions import IncompatibleSchemaError +from dbt.version import __version__ +from dbt_common.clients.system import read_json, write_json +from dbt_common.dataclass_schema import dbtClassMixin from dbt_common.events.functions import get_metadata_vars +from dbt_common.exceptions import 
DbtInternalError, DbtRuntimeError from dbt_common.invocation import get_invocation_id -from dbt_common.dataclass_schema import dbtClassMixin - -from dbt.version import __version__ -from dbt.artifacts.exceptions import IncompatibleSchemaError - BASE_SCHEMAS_URL = "https://schemas.getdbt.com/" SCHEMA_PATH = "dbt/{name}/v{version}.json" @@ -34,7 +33,7 @@ def __str__(self) -> str: class Writable: def write(self, path: str): - write_json(path, self.to_dict(omit_none=False)) # type: ignore + write_json(path, self.to_dict(omit_none=False, context={"artifact": True})) # type: ignore class Readable: @@ -60,8 +59,8 @@ class BaseArtifactMetadata(dbtClassMixin): invocation_id: Optional[str] = dataclasses.field(default_factory=get_invocation_id) env: Dict[str, str] = dataclasses.field(default_factory=get_metadata_vars) - def __post_serialize__(self, dct): - dct = super().__post_serialize__(dct) + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) if dct["generated_at"] and dct["generated_at"].endswith("+00:00"): dct["generated_at"] = dct["generated_at"].replace("+00:00", "") + "Z" return dct @@ -89,7 +88,7 @@ def inner(cls: Type[VersionedSchema]): return inner -# This is used in the ArtifactMixin and RemoteResult classes +# This is used in the ArtifactMixin and RemoteCompileResultMixin classes @dataclasses.dataclass class VersionedSchema(dbtClassMixin): dbt_schema_version: ClassVar[SchemaVersion] diff --git a/core/dbt/artifacts/schemas/catalog/v1/catalog.py b/core/dbt/artifacts/schemas/catalog/v1/catalog.py index bd6571152be..d6d02608bca 100644 --- a/core/dbt/artifacts/schemas/catalog/v1/catalog.py +++ b/core/dbt/artifacts/schemas/catalog/v1/catalog.py @@ -1,10 +1,14 @@ -from typing import Dict, Union, Optional, NamedTuple, Any, List from dataclasses import dataclass, field from datetime import datetime +from typing import Any, Dict, List, NamedTuple, Optional, Union +from dbt.artifacts.schemas.base import ( 
+ ArtifactMixin, + BaseArtifactMetadata, + schema_version, +) from dbt_common.dataclass_schema import dbtClassMixin from dbt_common.utils.formatting import lowercase -from dbt.artifacts.schemas.base import ArtifactMixin, BaseArtifactMetadata, schema_version Primitive = Union[bool, str, float, None] PrimitiveDict = Dict[str, Primitive] @@ -77,8 +81,8 @@ class CatalogResults(dbtClassMixin): errors: Optional[List[str]] = None _compile_results: Optional[Any] = None - def __post_serialize__(self, dct): - dct = super().__post_serialize__(dct) + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) if "_compile_results" in dct: del dct["_compile_results"] return dct diff --git a/core/dbt/artifacts/schemas/freshness/v3/freshness.py b/core/dbt/artifacts/schemas/freshness/v3/freshness.py index a9b956d2863..fe489994268 100644 --- a/core/dbt/artifacts/schemas/freshness/v3/freshness.py +++ b/core/dbt/artifacts/schemas/freshness/v3/freshness.py @@ -1,19 +1,23 @@ from dataclasses import dataclass, field -from typing import Dict, Any, Sequence, List, Union, Optional from datetime import datetime +from typing import Any, Dict, List, Optional, Sequence, Union from dbt.artifacts.resources import FreshnessThreshold -from dbt.artifacts.schemas.results import ExecutionResult, FreshnessStatus, NodeResult, TimingInfo from dbt.artifacts.schemas.base import ( ArtifactMixin, + BaseArtifactMetadata, VersionedSchema, schema_version, - BaseArtifactMetadata, ) -from dbt_common.dataclass_schema import dbtClassMixin, StrEnum -from dbt_common.exceptions import DbtInternalError - +from dbt.artifacts.schemas.results import ( + ExecutionResult, + FreshnessStatus, + NodeResult, + TimingInfo, +) from dbt.contracts.graph.nodes import SourceDefinition +from dbt_common.dataclass_schema import StrEnum, dbtClassMixin +from dbt_common.exceptions import DbtInternalError @dataclass diff --git 
a/core/dbt/artifacts/schemas/manifest/v12/manifest.py b/core/dbt/artifacts/schemas/manifest/v12/manifest.py index 2ac3f3d761c..cc13fca43f5 100644 --- a/core/dbt/artifacts/schemas/manifest/v12/manifest.py +++ b/core/dbt/artifacts/schemas/manifest/v12/manifest.py @@ -1,34 +1,33 @@ from dataclasses import dataclass, field -from typing import Mapping, Iterable, Tuple, Optional, Dict, List, Any, Union +from typing import Any, Dict, Iterable, List, Mapping, Optional, Tuple, Union from uuid import UUID -from dbt.artifacts.schemas.base import ( - BaseArtifactMetadata, - ArtifactMixin, - schema_version, - get_artifact_schema_version, -) -from dbt.artifacts.schemas.upgrades import upgrade_manifest_json from dbt.artifacts.resources import ( + Analysis, Documentation, Exposure, + GenericTest, Group, + HookNode, Macro, Metric, + Model, SavedQuery, - SemanticModel, - SourceDefinition, - UnitTestDefinition, Seed, - Analysis, + SemanticModel, SingularTest, - HookNode, - Model, - SqlOperation, - GenericTest, Snapshot, + SourceDefinition, + SqlOperation, + UnitTestDefinition, ) - +from dbt.artifacts.schemas.base import ( + ArtifactMixin, + BaseArtifactMetadata, + get_artifact_schema_version, + schema_version, +) +from dbt.artifacts.schemas.upgrades import upgrade_manifest_json NodeEdgeMap = Dict[str, List[str]] UniqueID = str @@ -181,11 +180,3 @@ def upgrade_schema_version(cls, data): if manifest_schema_version < cls.dbt_schema_version.version: data = upgrade_manifest_json(data, manifest_schema_version) return cls.from_dict(data) - - def __post_serialize__(self, dct): - for unique_id, node in dct["nodes"].items(): - if "config_call_dict" in node: - del node["config_call_dict"] - if "defer_relation" in node: - del node["defer_relation"] - return dct diff --git a/core/dbt/artifacts/schemas/results.py b/core/dbt/artifacts/schemas/results.py index 5b36d7fe6fb..f3f830ca927 100644 --- a/core/dbt/artifacts/schemas/results.py +++ b/core/dbt/artifacts/schemas/results.py @@ -1,12 +1,12 @@ 
-from dbt.contracts.graph.nodes import ResultNode -from dbt_common.events.helpers import datetime_to_json_string -from dbt_common.utils import cast_to_str, cast_to_int -from dbt_common.dataclass_schema import dbtClassMixin, StrEnum - from dataclasses import dataclass from datetime import datetime from typing import Any, Callable, Dict, List, Optional, Sequence, Union +from dbt.contracts.graph.nodes import ResultNode +from dbt_common.dataclass_schema import StrEnum, dbtClassMixin +from dbt_common.events.helpers import datetime_to_json_string +from dbt_common.utils import cast_to_int, cast_to_str + @dataclass class TimingInfo(dbtClassMixin): diff --git a/core/dbt/artifacts/schemas/run/v5/run.py b/core/dbt/artifacts/schemas/run/v5/run.py index 47cc0cb3b87..2724554346e 100644 --- a/core/dbt/artifacts/schemas/run/v5/run.py +++ b/core/dbt/artifacts/schemas/run/v5/run.py @@ -1,31 +1,32 @@ -import threading -from typing import Any, Optional, Iterable, Tuple, Sequence, Dict, TYPE_CHECKING import copy +import threading from dataclasses import dataclass, field from datetime import datetime +from typing import Any, Dict, Iterable, Optional, Sequence, Tuple +# https://github.com/dbt-labs/dbt-core/issues/10098 +# Needed for Mashumaro serialization of RunResult below +# TODO: investigate alternative approaches to restore conditional import +# if TYPE_CHECKING: +import agate -from dbt.constants import SECRET_ENV_PREFIX from dbt.artifacts.resources import CompiledResource from dbt.artifacts.schemas.base import ( - BaseArtifactMetadata, ArtifactMixin, - schema_version, + BaseArtifactMetadata, get_artifact_schema_version, + schema_version, ) from dbt.artifacts.schemas.results import ( BaseResult, + ExecutionResult, NodeResult, - RunStatus, ResultNode, - ExecutionResult, + RunStatus, ) -from dbt_common.clients.system import write_json from dbt.exceptions import scrub_secrets - - -if TYPE_CHECKING: - import agate +from dbt_common.clients.system import write_json +from 
dbt_common.constants import SECRET_ENV_PREFIX @dataclass diff --git a/core/dbt/cli/context.py b/core/dbt/cli/context.py index b8f541b9ad8..f0be810e1e4 100644 --- a/core/dbt/cli/context.py +++ b/core/dbt/cli/context.py @@ -1,6 +1,7 @@ -import click from typing import Optional +import click + from dbt.cli.main import cli as dbt diff --git a/core/dbt/cli/exceptions.py b/core/dbt/cli/exceptions.py index d88f91c01ac..54479c30900 100644 --- a/core/dbt/cli/exceptions.py +++ b/core/dbt/cli/exceptions.py @@ -1,6 +1,7 @@ -from typing import Optional, IO +from typing import IO, Optional from click.exceptions import ClickException + from dbt.utils import ExitCodes diff --git a/core/dbt/cli/flags.py b/core/dbt/cli/flags.py index d87c1aa96eb..d3cfd707cbb 100644 --- a/core/dbt/cli/flags.py +++ b/core/dbt/cli/flags.py @@ -6,20 +6,22 @@ from pprint import pformat as pf from typing import Any, Callable, Dict, List, Optional, Set, Union -from click import Context, get_current_context, Parameter -from click.core import Command as ClickCommand, Group, ParameterSource +from click import Context, Parameter, get_current_context +from click.core import Command as ClickCommand +from click.core import Group, ParameterSource + from dbt.cli.exceptions import DbtUsageException from dbt.cli.resolvers import default_log_path, default_project_dir from dbt.cli.types import Command as CliCommand from dbt.config.project import read_project_flags from dbt.contracts.project import ProjectFlags +from dbt.deprecations import renamed_env_var +from dbt.events import ALL_EVENT_NAMES from dbt_common import ui +from dbt_common.clients import jinja from dbt_common.events import functions from dbt_common.exceptions import DbtInternalError -from dbt_common.clients import jinja -from dbt.deprecations import renamed_env_var from dbt_common.helper_types import WarnErrorOptions -from dbt.events import ALL_EVENT_NAMES if os.name != "nt": # https://bugs.python.org/issue41567 @@ -355,7 +357,7 @@ def 
set_common_global_flags(self): # This is here to prevent mypy from complaining about all of the # attributes which we added dynamically. def __getattr__(self, name: str) -> Any: - return super().__get_attribute__(name) # type: ignore + return super().__getattribute__(name) # type: ignore CommandParams = List[str] diff --git a/core/dbt/cli/main.py b/core/dbt/cli/main.py index 07a9de861a7..91d9d1d7668 100644 --- a/core/dbt/cli/main.py +++ b/core/dbt/cli/main.py @@ -4,21 +4,16 @@ from typing import Callable, List, Optional, Union import click -from click.exceptions import ( - Exit as ClickExit, - BadOptionUsage, - NoSuchOption, - UsageError, -) +from click.exceptions import BadOptionUsage +from click.exceptions import Exit as ClickExit +from click.exceptions import NoSuchOption, UsageError -from dbt.cli import requires, params as p -from dbt.cli.exceptions import ( - DbtInternalException, - DbtUsageException, -) -from dbt.contracts.graph.manifest import Manifest from dbt.artifacts.schemas.catalog import CatalogArtifact from dbt.artifacts.schemas.run import RunExecutionResult +from dbt.cli import params as p +from dbt.cli import requires +from dbt.cli.exceptions import DbtInternalException, DbtUsageException +from dbt.contracts.graph.manifest import Manifest from dbt_common.events.base_types import EventMsg @@ -109,7 +104,6 @@ def global_flags(func): @p.deprecated_favor_state @p.deprecated_print @p.deprecated_state - @p.enable_legacy_logger @p.fail_fast @p.favor_state @p.indirect_selection @@ -170,6 +164,7 @@ def cli(ctx, **kwargs): @cli.command("build") @click.pass_context @global_flags +@p.empty @p.exclude @p.export_saved_queries @p.full_refresh diff --git a/core/dbt/cli/option_types.py b/core/dbt/cli/option_types.py index d55aa736e16..6c94116eb67 100644 --- a/core/dbt/cli/option_types.py +++ b/core/dbt/cli/option_types.py @@ -1,10 +1,9 @@ -from click import ParamType, Choice +from click import Choice, ParamType -from dbt.config.utils import parse_cli_yaml_string 
+from dbt.config.utils import exclusive_primary_alt_value_setting, parse_cli_yaml_string from dbt.events import ALL_EVENT_NAMES -from dbt.exceptions import ValidationError, OptionNotYamlDictError +from dbt.exceptions import OptionNotYamlDictError, ValidationError from dbt_common.exceptions import DbtValidationError - from dbt_common.helper_types import WarnErrorOptions @@ -52,10 +51,17 @@ class WarnErrorOptionsType(YAML): def convert(self, value, param, ctx): # this function is being used by param in click include_exclude = super().convert(value, param, ctx) + exclusive_primary_alt_value_setting( + include_exclude, "include", "error", "warn_error_options" + ) + exclusive_primary_alt_value_setting( + include_exclude, "exclude", "warn", "warn_error_options" + ) return WarnErrorOptions( include=include_exclude.get("include", []), exclude=include_exclude.get("exclude", []), + silence=include_exclude.get("silence", []), valid_error_names=ALL_EVENT_NAMES, ) diff --git a/core/dbt/cli/options.py b/core/dbt/cli/options.py index bf0749ae002..c73a4bcd374 100644 --- a/core/dbt/cli/options.py +++ b/core/dbt/cli/options.py @@ -1,8 +1,10 @@ -import click import inspect import typing as t + +import click from click import Context from click.parser import OptionParser, ParsingState + from dbt.cli.option_types import ChoiceTuple diff --git a/core/dbt/cli/params.py b/core/dbt/cli/params.py index b2716728ce6..0c24597a3bc 100644 --- a/core/dbt/cli/params.py +++ b/core/dbt/cli/params.py @@ -1,9 +1,10 @@ from pathlib import Path import click + +from dbt.cli.option_types import YAML, ChoiceTuple, Package, WarnErrorOptionsType from dbt.cli.options import MultiOption -from dbt.cli.option_types import YAML, ChoiceTuple, WarnErrorOptionsType, Package -from dbt.cli.resolvers import default_project_dir, default_profiles_dir +from dbt.cli.resolvers import default_profiles_dir, default_project_dir from dbt.version import get_version_information add_package = click.option( @@ -90,12 +91,6 @@ 
is_flag=True, ) -enable_legacy_logger = click.option( - "--enable-legacy-logger/--no-enable-legacy-logger", - envvar="DBT_ENABLE_LEGACY_LOGGER", - hidden=True, -) - exclude = click.option( "--exclude", envvar=None, diff --git a/core/dbt/cli/requires.py b/core/dbt/cli/requires.py index 75c81ebd7e1..c9d38c55eec 100644 --- a/core/dbt/cli/requires.py +++ b/core/dbt/cli/requires.py @@ -1,49 +1,48 @@ +import importlib.util import os +import time +import traceback +from functools import update_wrapper +from typing import Optional -import dbt.tracking -from dbt_common.context import set_invocation_context -from dbt_common.invocation import reset_invocation_id +from click import Context -from dbt.version import installed as installed_version -from dbt.adapters.factory import adapter_management, register_adapter, get_adapter -from dbt.context.providers import generate_runtime_macro_context -from dbt.flags import set_flags, get_flag_dict -from dbt.cli.exceptions import ( - ExceptionExit, - ResultExit, -) +import dbt.tracking +from dbt.adapters.factory import adapter_management, get_adapter, register_adapter +from dbt.cli.exceptions import ExceptionExit, ResultExit from dbt.cli.flags import Flags from dbt.config import RuntimeConfig -from dbt.config.runtime import load_project, load_profile, UnsetProfile +from dbt.config.runtime import UnsetProfile, load_profile, load_project +from dbt.context.providers import generate_runtime_macro_context from dbt.context.query_header import generate_query_header_context - -from dbt_common.events.base_types import EventLevel -from dbt_common.events.functions import ( - fire_event, - LOG_VERSION, -) from dbt.events.logging import setup_event_logger from dbt.events.types import ( - MainReportVersion, + CommandCompleted, + MainEncounteredError, MainReportArgs, + MainReportVersion, + MainStackTrace, MainTrackingUserState, + ResourceReport, ) -from dbt_common.events.helpers import get_json_string_utcnow -from dbt.events.types import 
CommandCompleted, MainEncounteredError, MainStackTrace, ResourceReport -from dbt_common.exceptions import DbtBaseException as DbtException from dbt.exceptions import DbtProjectError, FailFastError +from dbt.flags import get_flag_dict, set_flags +from dbt.mp_context import get_mp_context from dbt.parser.manifest import parse_manifest +from dbt.plugins import set_up_plugin_manager from dbt.profiler import profiler from dbt.tracking import active_user, initialize_from_flags, track_run +from dbt.utils import try_get_max_rss_kb +from dbt.version import installed as installed_version +from dbt_common.clients.system import get_env +from dbt_common.context import get_invocation_context, set_invocation_context +from dbt_common.events.base_types import EventLevel +from dbt_common.events.functions import LOG_VERSION, fire_event +from dbt_common.events.helpers import get_json_string_utcnow +from dbt_common.exceptions import DbtBaseException as DbtException +from dbt_common.invocation import reset_invocation_id +from dbt_common.record import Recorder, RecorderMode, get_record_mode_from_env from dbt_common.utils import cast_dict_to_dict_of_strings -from dbt.plugins import set_up_plugin_manager -from dbt.mp_context import get_mp_context - -from click import Context -from functools import update_wrapper -import importlib.util -import time -import traceback def preflight(func): @@ -52,7 +51,14 @@ def wrapper(*args, **kwargs): assert isinstance(ctx, Context) ctx.obj = ctx.obj or {} - set_invocation_context(os.environ) + set_invocation_context({}) + + # Record/Replay + setup_record_replay() + + # Must be set after record/replay is set up so that the env can be + # recorded or replayed if needed. 
+ get_invocation_context()._env = get_env() # Flags flags = Flags(ctx) @@ -93,6 +99,28 @@ def wrapper(*args, **kwargs): return update_wrapper(wrapper, func) +def setup_record_replay(): + rec_mode = get_record_mode_from_env() + + recorder: Optional[Recorder] = None + if rec_mode == RecorderMode.REPLAY: + recording_path = os.environ["DBT_REPLAY"] + recorder = Recorder(RecorderMode.REPLAY, recording_path) + elif rec_mode == RecorderMode.RECORD: + recorder = Recorder(RecorderMode.RECORD) + + get_invocation_context().recorder = recorder + + +def tear_down_record_replay(): + recorder = get_invocation_context().recorder + if recorder is not None: + if recorder.mode == RecorderMode.RECORD: + recorder.write("recording.json") + elif recorder.mode == RecorderMode.REPLAY: + recorder.write_diffs("replay_diffs.json") + + def postflight(func): """The decorator that handles all exception handling for the click commands. This decorator must be used before any other decorators that may throw an exception.""" @@ -128,7 +156,7 @@ def wrapper(*args, **kwargs): command_wall_clock_time=time.perf_counter() - start_func, process_user_time=rusage.ru_utime, process_kernel_time=rusage.ru_stime, - process_mem_max_rss=rusage.ru_maxrss, + process_mem_max_rss=try_get_max_rss_kb() or rusage.ru_maxrss, process_in_blocks=rusage.ru_inblock, process_out_blocks=rusage.ru_oublock, ), @@ -146,6 +174,8 @@ def wrapper(*args, **kwargs): ) ) + tear_down_record_replay() + if not success: raise ResultExit(result) @@ -281,7 +311,7 @@ def wrapper(*args, **kwargs): runtime_config = ctx.obj["runtime_config"] - # a manifest has already been set on the context, so don't overwrite it + # if a manifest has already been set on the context, don't overwrite it if ctx.obj.get("manifest") is None: ctx.obj["manifest"] = parse_manifest( runtime_config, write_perf_info, write, ctx.obj["flags"].write_json diff --git a/core/dbt/cli/resolvers.py b/core/dbt/cli/resolvers.py index 48ba92c365a..6d495501c39 100644 --- 
a/core/dbt/cli/resolvers.py +++ b/core/dbt/cli/resolvers.py @@ -1,4 +1,5 @@ from pathlib import Path + from dbt.config.project import PartialProject from dbt.exceptions import DbtProjectError diff --git a/core/dbt/clients/git.py b/core/dbt/clients/git.py index 4da1c323327..33beb93a6ba 100644 --- a/core/dbt/clients/git.py +++ b/core/dbt/clients/git.py @@ -1,25 +1,26 @@ -import re import os.path +import re + +from packaging import version -from dbt_common.clients.system import run_cmd, rmdir -from dbt_common.events.functions import fire_event from dbt.events.types import ( - GitSparseCheckoutSubdirectory, + GitNothingToDo, + GitProgressCheckedOutAt, GitProgressCheckoutRevision, - GitProgressUpdatingExistingDependency, GitProgressPullingNewDependency, - GitNothingToDo, GitProgressUpdatedCheckoutRange, - GitProgressCheckedOutAt, + GitProgressUpdatingExistingDependency, + GitSparseCheckoutSubdirectory, ) from dbt.exceptions import ( CommandResultError, + DbtRuntimeError, GitCheckoutError, GitCloningError, UnknownGitCloningProblemError, - DbtRuntimeError, ) -from packaging import version +from dbt_common.clients.system import rmdir, run_cmd +from dbt_common.events.functions import fire_event def _is_commit(revision: str) -> bool: diff --git a/core/dbt/clients/jinja.py b/core/dbt/clients/jinja.py index 514bbcc342f..4aa6f013916 100644 --- a/core/dbt/clients/jinja.py +++ b/core/dbt/clients/jinja.py @@ -1,7 +1,7 @@ import re import threading from contextlib import contextmanager -from typing import List, Union, Optional, Dict, Any, NoReturn, Tuple +from typing import Any, Dict, List, NoReturn, Optional, Tuple, Union import jinja2 import jinja2.ext @@ -10,22 +10,20 @@ import jinja2.parser import jinja2.sandbox -from dbt_common.clients.jinja import ( - render_template, - get_template, - CallableMacroGenerator, - MacroProtocol, -) -from dbt_common.utils import deep_map_render from dbt.contracts.graph.nodes import GenericTestNode - from dbt.exceptions import ( DbtInternalError, 
MaterializtionMacroNotUsedError, NoSupportedLanguagesFoundError, ) from dbt.node_types import ModelLanguage - +from dbt_common.clients.jinja import ( + CallableMacroGenerator, + MacroProtocol, + get_template, + render_template, +) +from dbt_common.utils import deep_map_render SUPPORTED_LANG_ARG = jinja2.nodes.Name("supported_languages", "param") diff --git a/core/dbt/clients/jinja_static.py b/core/dbt/clients/jinja_static.py index aa9c72d7d79..8e0c34df2e6 100644 --- a/core/dbt/clients/jinja_static.py +++ b/core/dbt/clients/jinja_static.py @@ -1,11 +1,11 @@ from typing import Any, Dict, Optional import jinja2 -from dbt_common.clients.jinja import get_environment -from dbt_common.tests import test_caching_enabled + from dbt.exceptions import MacroNamespaceNotStringError +from dbt_common.clients.jinja import get_environment from dbt_common.exceptions.macros import MacroNameNotStringError - +from dbt_common.tests import test_caching_enabled _TESTING_MACRO_CACHE: Optional[Dict[str, Any]] = {} diff --git a/core/dbt/clients/registry.py b/core/dbt/clients/registry.py index 3a160c7f791..ed634e9f811 100644 --- a/core/dbt/clients/registry.py +++ b/core/dbt/clients/registry.py @@ -1,22 +1,24 @@ import functools +import os from typing import Any, Dict, List + import requests -from dbt_common.events.functions import fire_event + +from dbt import deprecations from dbt.events.types import ( - RegistryProgressGETRequest, - RegistryProgressGETResponse, RegistryIndexProgressGETRequest, RegistryIndexProgressGETResponse, - RegistryResponseUnexpectedType, - RegistryResponseMissingTopKeys, - RegistryResponseMissingNestedKeys, + RegistryProgressGETRequest, + RegistryProgressGETResponse, RegistryResponseExtraNestedKeys, + RegistryResponseMissingNestedKeys, + RegistryResponseMissingTopKeys, + RegistryResponseUnexpectedType, ) from dbt.utils import memoized -from dbt_common.utils.connection import connection_exception_retry -from dbt import deprecations from dbt_common import semver -import 
os +from dbt_common.events.functions import fire_event +from dbt_common.utils.connection import connection_exception_retry if os.getenv("DBT_PACKAGE_HUB_URL"): DEFAULT_REGISTRY_BASE_URL = os.getenv("DBT_PACKAGE_HUB_URL") diff --git a/core/dbt/clients/yaml_helper.py b/core/dbt/clients/yaml_helper.py index 95f65e397ae..a0a51099331 100644 --- a/core/dbt/clients/yaml_helper.py +++ b/core/dbt/clients/yaml_helper.py @@ -1,13 +1,17 @@ -import dbt_common.exceptions.base -import dbt_common.exceptions from typing import Any, Dict, Optional + import yaml +import dbt_common.exceptions +import dbt_common.exceptions.base + # the C version is faster, but it doesn't always exist try: - from yaml import CLoader as Loader, CSafeLoader as SafeLoader, CDumper as Dumper + from yaml import CDumper as Dumper + from yaml import CLoader as Loader + from yaml import CSafeLoader as SafeLoader except ImportError: - from yaml import Loader, SafeLoader, Dumper # type: ignore # noqa: F401 + from yaml import Dumper, Loader, SafeLoader # type: ignore # noqa: F401 YAML_ERROR_MESSAGE = """ diff --git a/core/dbt/compilation.py b/core/dbt/compilation.py index 09695fc59d7..d03407b2a4c 100644 --- a/core/dbt/compilation.py +++ b/core/dbt/compilation.py @@ -1,46 +1,45 @@ import json - -import networkx as nx # type: ignore import os import pickle - from collections import defaultdict -from typing import List, Dict, Any, Tuple, Optional +from typing import Any, Dict, List, Optional, Tuple -from dbt_common.invocation import get_invocation_id -from dbt.flags import get_flags +import networkx as nx # type: ignore +import sqlparse + +import dbt.tracking from dbt.adapters.factory import get_adapter from dbt.clients import jinja from dbt.context.providers import ( generate_runtime_model_context, generate_runtime_unit_test_context, ) -from dbt_common.clients.system import make_directory from dbt.contracts.graph.manifest import Manifest, UniqueID from dbt.contracts.graph.nodes import ( - ManifestNode, - 
ManifestSQLNode, GenericTestNode, GraphMemberNode, InjectedCTE, + ManifestNode, + ManifestSQLNode, SeedNode, - UnitTestNode, UnitTestDefinition, + UnitTestNode, ) +from dbt.events.types import FoundStats, WritingInjectedSQLForNode from dbt.exceptions import ( - GraphDependencyNotFoundError, DbtInternalError, DbtRuntimeError, + GraphDependencyNotFoundError, ) +from dbt.flags import get_flags from dbt.graph import Graph -from dbt_common.events.functions import fire_event -from dbt_common.events.types import Note +from dbt.node_types import ModelLanguage, NodeType +from dbt_common.clients.system import make_directory from dbt_common.events.contextvars import get_node_info -from dbt.events.types import WritingInjectedSQLForNode, FoundStats -from dbt.node_types import NodeType, ModelLanguage from dbt_common.events.format import pluralize -import dbt.tracking -import sqlparse +from dbt_common.events.functions import fire_event +from dbt_common.events.types import Note +from dbt_common.invocation import get_invocation_id graph_file_name = "graph.gpickle" diff --git a/core/dbt/config/__init__.py b/core/dbt/config/__init__.py index 767901c84eb..1f3875422c9 100644 --- a/core/dbt/config/__init__.py +++ b/core/dbt/config/__init__.py @@ -1,4 +1,4 @@ # all these are just exports, they need "noqa" so flake8 will not complain. 
from .profile import Profile # noqa -from .project import Project, IsFQNResource, PartialProject # noqa +from .project import IsFQNResource, PartialProject, Project # noqa from .runtime import RuntimeConfig # noqa diff --git a/core/dbt/config/profile.py b/core/dbt/config/profile.py index 2b7ad51b562..ada7f30711c 100644 --- a/core/dbt/config/profile.py +++ b/core/dbt/config/profile.py @@ -1,14 +1,10 @@ +import os from dataclasses import dataclass from typing import Any, Dict, Optional, Tuple -import os - -from dbt_common.dataclass_schema import ValidationError -from dbt.flags import get_flags -from dbt_common.clients.system import load_file_contents +from dbt.adapters.contracts.connection import Credentials, HasCredentials from dbt.clients.yaml_helper import load_yaml_text from dbt.contracts.project import ProfileConfig -from dbt.adapters.contracts.connection import Credentials, HasCredentials from dbt.events.types import MissingProfileTarget from dbt.exceptions import ( CompilationError, @@ -17,8 +13,11 @@ DbtRuntimeError, ProfileConfigError, ) -from dbt_common.exceptions import DbtValidationError +from dbt.flags import get_flags +from dbt_common.clients.system import load_file_contents +from dbt_common.dataclass_schema import ValidationError from dbt_common.events.functions import fire_event +from dbt_common.exceptions import DbtValidationError from .renderer import ProfileRenderer diff --git a/core/dbt/config/project.py b/core/dbt/config/project.py index 7cdbebf0328..7f396477fd6 100644 --- a/core/dbt/config/project.py +++ b/core/dbt/config/project.py @@ -1,59 +1,50 @@ +import os from copy import deepcopy from dataclasses import dataclass, field from itertools import chain -from typing import ( - List, - Dict, - Any, - Optional, - TypeVar, - Union, - Mapping, -) -from typing_extensions import Protocol, runtime_checkable +from typing import Any, Dict, List, Mapping, Optional, TypeVar, Union -import os +from typing_extensions import Protocol, runtime_checkable -from 
dbt.flags import get_flags from dbt import deprecations +from dbt.adapters.contracts.connection import QueryComment +from dbt.clients.yaml_helper import load_yaml_text +from dbt.config.selectors import SelectorDict +from dbt.config.utils import exclusive_primary_alt_value_setting from dbt.constants import ( + DBT_PROJECT_FILE_NAME, DEPENDENCIES_FILE_NAME, - PACKAGES_FILE_NAME, PACKAGE_LOCK_HASH_KEY, - DBT_PROJECT_FILE_NAME, + PACKAGES_FILE_NAME, ) -from dbt_common.clients.system import path_exists, load_file_contents -from dbt.clients.yaml_helper import load_yaml_text -from dbt.adapters.contracts.connection import QueryComment +from dbt.contracts.project import PackageConfig +from dbt.contracts.project import Project as ProjectContract +from dbt.contracts.project import ProjectFlags, ProjectPackageMetadata, SemverString from dbt.exceptions import ( + DbtExclusivePropertyUseError, DbtProjectError, + DbtRuntimeError, ProjectContractBrokenError, ProjectContractError, - DbtRuntimeError, ) -from dbt_common.exceptions import SemverError +from dbt.flags import get_flags from dbt.graph import SelectionSpec -from dbt_common.helper_types import NoValue -from dbt_common.semver import VersionSpecifier, versions_compatible -from dbt.version import get_installed_version -from dbt.utils import MultiDict, md5, coerce_dict_str from dbt.node_types import NodeType -from dbt.config.selectors import SelectorDict -from dbt.contracts.project import ( - Project as ProjectContract, - SemverString, - ProjectFlags, -) -from dbt.contracts.project import PackageConfig, ProjectPackageMetadata +from dbt.utils import MultiDict, coerce_dict_str, md5 +from dbt.version import get_installed_version +from dbt_common.clients.system import load_file_contents, path_exists from dbt_common.dataclass_schema import ValidationError +from dbt_common.exceptions import SemverError +from dbt_common.helper_types import NoValue +from dbt_common.semver import VersionSpecifier, versions_compatible + from .renderer 
import DbtProjectYamlRenderer, PackageRenderer from .selectors import ( + SelectorConfig, selector_config_from_data, selector_data_from_root, - SelectorConfig, ) - INVALID_VERSION_ERROR = """\ This version of dbt is not supported with the '{package}' package. Installed version of dbt: {installed} @@ -835,10 +826,21 @@ def read_project_flags(project_dir: str, profiles_dir: str) -> ProjectFlags: project_flags = profile_project_flags if project_flags is not None: + # handle collapsing `include` and `error` as well as collapsing `exclude` and `warn` + # for warn_error_options + warn_error_options = project_flags.get("warn_error_options") + exclusive_primary_alt_value_setting( + warn_error_options, "include", "error", "warn_error_options" + ) + exclusive_primary_alt_value_setting( + warn_error_options, "exclude", "warn", "warn_error_options" + ) + ProjectFlags.validate(project_flags) return ProjectFlags.from_dict(project_flags) - except (DbtProjectError) as exc: - # We don't want to eat the DbtProjectError for UserConfig to ProjectFlags + except (DbtProjectError, DbtExclusivePropertyUseError) as exc: + # We don't want to eat the DbtProjectError for UserConfig to ProjectFlags or + # DbtConfigError for warn_error_options munging raise exc except (DbtRuntimeError, ValidationError): pass diff --git a/core/dbt/config/renderer.py b/core/dbt/config/renderer.py index eee740893b8..4f605979e62 100644 --- a/core/dbt/config/renderer.py +++ b/core/dbt/config/renderer.py @@ -1,20 +1,20 @@ -from typing import Dict, Any, Tuple, Optional, Union, Callable import re from datetime import date +from typing import Any, Callable, Dict, Optional, Tuple, Union +from dbt.adapters.contracts.connection import HasCredentials from dbt.clients.jinja import get_rendered -from dbt_common.clients.jinja import catch_jinja -from dbt.constants import SECRET_ENV_PREFIX, DEPENDENCIES_FILE_NAME -from dbt.context.target import TargetContext -from dbt.context.secret import SecretContext, SECRET_PLACEHOLDER 
+from dbt.constants import DEPENDENCIES_FILE_NAME, SECRET_PLACEHOLDER from dbt.context.base import BaseContext -from dbt.adapters.contracts.connection import HasCredentials +from dbt.context.secret import SecretContext +from dbt.context.target import TargetContext from dbt.exceptions import DbtProjectError +from dbt_common.clients.jinja import catch_jinja +from dbt_common.constants import SECRET_ENV_PREFIX from dbt_common.context import get_invocation_context from dbt_common.exceptions import CompilationError, RecursionError from dbt_common.utils import deep_map_render - Keypath = Tuple[Union[str, int], ...] diff --git a/core/dbt/config/runtime.py b/core/dbt/config/runtime.py index e32005aa91f..f64fd22775b 100644 --- a/core/dbt/config/runtime.py +++ b/core/dbt/config/runtime.py @@ -16,24 +16,29 @@ ) from dbt import tracking -from dbt.adapters.factory import get_include_paths, get_relation_class_by_name -from dbt.adapters.contracts.connection import AdapterRequiredConfig, Credentials, HasCredentials +from dbt.adapters.contracts.connection import ( + AdapterRequiredConfig, + Credentials, + HasCredentials, +) from dbt.adapters.contracts.relation import ComponentName -from dbt.flags import get_flags +from dbt.adapters.factory import get_include_paths, get_relation_class_by_name from dbt.config.project import load_raw_project from dbt.contracts.graph.manifest import ManifestMetadata from dbt.contracts.project import Configuration -from dbt_common.dataclass_schema import ValidationError -from dbt_common.events.functions import warn_or_error from dbt.events.types import UnusedResourceConfigPath from dbt.exceptions import ( ConfigContractBrokenError, DbtProjectError, - NonUniquePackageNameError, DbtRuntimeError, + NonUniquePackageNameError, UninstalledPackagesFoundError, ) +from dbt.flags import get_flags +from dbt_common.dataclass_schema import ValidationError +from dbt_common.events.functions import warn_or_error from dbt_common.helper_types import DictDefaultEmptyStr, 
FQNPath, PathSet + from .profile import Profile from .project import Project from .renderer import DbtProjectYamlRenderer, ProfileRenderer diff --git a/core/dbt/config/selectors.py b/core/dbt/config/selectors.py index 4602abbf84e..82ab388a456 100644 --- a/core/dbt/config/selectors.py +++ b/core/dbt/config/selectors.py @@ -1,21 +1,21 @@ -from pathlib import Path from copy import deepcopy -from typing import Dict, Any, Union -from dbt.clients.yaml_helper import yaml, Loader, Dumper, load_yaml_text # noqa: F401 -from dbt_common.dataclass_schema import ValidationError - -from .renderer import BaseRenderer +from pathlib import Path +from typing import Any, Dict, Union +from dbt.clients.yaml_helper import Dumper, Loader, load_yaml_text, yaml # noqa: F401 +from dbt.contracts.selection import SelectorFile +from dbt.exceptions import DbtSelectorsError +from dbt.graph import SelectionSpec, parse_from_selectors_definition +from dbt.graph.selector_spec import SelectionCriteria from dbt_common.clients.system import ( load_file_contents, path_exists, resolve_path_from_base, ) -from dbt.contracts.selection import SelectorFile -from dbt.exceptions import DbtSelectorsError +from dbt_common.dataclass_schema import ValidationError from dbt_common.exceptions import DbtRuntimeError -from dbt.graph import parse_from_selectors_definition, SelectionSpec -from dbt.graph.selector_spec import SelectionCriteria + +from .renderer import BaseRenderer MALFORMED_SELECTOR_ERROR = """\ The selectors.yml file in this project is malformed. 
Please double check diff --git a/core/dbt/config/utils.py b/core/dbt/config/utils.py index bb5546cc743..6f7f1266309 100644 --- a/core/dbt/config/utils.py +++ b/core/dbt/config/utils.py @@ -1,10 +1,9 @@ -from typing import Any, Dict - +from typing import Any, Dict, Optional from dbt.clients import yaml_helper -from dbt_common.events.functions import fire_event from dbt.events.types import InvalidOptionYAML -from dbt.exceptions import OptionNotYamlDictError +from dbt.exceptions import DbtExclusivePropertyUseError, OptionNotYamlDictError +from dbt_common.events.functions import fire_event from dbt_common.exceptions import DbtValidationError @@ -23,3 +22,32 @@ def parse_cli_yaml_string(var_string: str, cli_option_name: str) -> Dict[str, An except (DbtValidationError, OptionNotYamlDictError): fire_event(InvalidOptionYAML(option_name=cli_option_name)) raise + + +def exclusive_primary_alt_value_setting( + dictionary: Optional[Dict[str, Any]], + primary: str, + alt: str, + parent_config: Optional[str] = None, +) -> None: + """Munges in place under the primary the options for the primary and alt values + + Sometimes we allow setting something via TWO keys, but not at the same time. If both the primary + key and alt key have values, an error gets raised. If the alt key has values, then we update + the dictionary to ensure the primary key contains the values. If neither are set, nothing happens. 
+ """ + + if dictionary is None: + return + + primary_options = dictionary.get(primary) + alt_options = dictionary.get(alt) + + if primary_options and alt_options: + where = f" in `{parent_config}`" if parent_config is not None else "" + raise DbtExclusivePropertyUseError( + f"Only `{alt}` or `{primary}` can be specified{where}, not both" + ) + + if alt_options: + dictionary[primary] = alt_options diff --git a/core/dbt/constants.py b/core/dbt/constants.py index 31e4d833185..feaf0957746 100644 --- a/core/dbt/constants.py +++ b/core/dbt/constants.py @@ -1,7 +1,7 @@ -# TODO: remove SECRET_ENV_PREFIX and import from dbt_common -SECRET_ENV_PREFIX = "DBT_ENV_SECRET_" DEFAULT_ENV_PLACEHOLDER = "DBT_DEFAULT_PLACEHOLDER" +SECRET_PLACEHOLDER = "$$$DBT_SECRET_START$$${}$$$DBT_SECRET_END$$$" + MAXIMUM_SEED_SIZE = 1 * 1024 * 1024 MAXIMUM_SEED_SIZE_NAME = "1MB" diff --git a/core/dbt/context/base.py b/core/dbt/context/base.py index e969506f625..5b8fd45e350 100644 --- a/core/dbt/context/base.py +++ b/core/dbt/context/base.py @@ -1,38 +1,45 @@ from __future__ import annotations +import datetime +import itertools import json import os -from typing import Any, Callable, Dict, NoReturn, Optional, Mapping, Iterable, Set, List +import re import threading +from typing import Any, Callable, Dict, Iterable, List, Mapping, NoReturn, Optional, Set + +# These modules are added to the context. 
Consider alternative +# approaches which will extend well to potentially many modules +import pytz -from dbt.flags import get_flags import dbt.flags as flags_module -from dbt import tracking -from dbt import utils +from dbt import tracking, utils from dbt.clients.jinja import get_rendered -from dbt.clients.yaml_helper import yaml, safe_load, SafeLoader, Loader, Dumper # noqa: F401 -from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER +from dbt.clients.yaml_helper import ( # noqa: F401 + Dumper, + Loader, + SafeLoader, + safe_load, + yaml, +) +from dbt.constants import DEFAULT_ENV_PLACEHOLDER, SECRET_PLACEHOLDER from dbt.contracts.graph.nodes import Resource +from dbt.events.types import JinjaLogDebug, JinjaLogInfo from dbt.exceptions import ( - SecretEnvVarLocationError, EnvVarMissingError, RequiredVarNotFoundError, + SecretEnvVarLocationError, SetStrictWrongTypeError, ZipStrictWrongTypeError, ) +from dbt.flags import get_flags +from dbt.version import __version__ as dbt_version +from dbt_common.constants import SECRET_ENV_PREFIX from dbt_common.context import get_invocation_context -from dbt_common.exceptions.macros import MacroReturn -from dbt_common.events.functions import fire_event, get_invocation_id -from dbt.events.types import JinjaLogInfo, JinjaLogDebug from dbt_common.events.contextvars import get_node_info -from dbt.version import __version__ as dbt_version - -# These modules are added to the context. 
Consider alternative -# approaches which will extend well to potentially many modules -import pytz -import datetime -import re -import itertools +from dbt_common.events.functions import fire_event, get_invocation_id +from dbt_common.events.types import PrintEvent +from dbt_common.exceptions.macros import MacroReturn # See the `contexts` module README for more information on how contexts work @@ -328,6 +335,7 @@ def env_var(self, var: str, default: Optional[str] = None) -> str: def debug(): """Enter a debugger at this line in the compiled jinja code.""" import sys + import ipdb # type: ignore frame = sys._getframe(3) @@ -561,6 +569,18 @@ def log(msg: str, info: bool = False) -> str: {{ log("Running some_macro: " ~ arg1 ~ ", " ~ arg2) }} {% endmacro %}" """ + # Detect instances of the placeholder value ($$$DBT_SECRET_START...DBT_SECRET_END$$$) + # and replace it with the standard mask '*****' + if "DBT_SECRET_START" in str(msg): + search_group = f"({SECRET_ENV_PREFIX}(.*))" + pattern = SECRET_PLACEHOLDER.format(search_group).replace("$", r"\$") + m = re.search( + pattern, + msg, + ) + if m: + msg = re.sub(pattern, "*****", msg) + if info: fire_event(JinjaLogInfo(msg=msg, node_info=get_node_info())) else: @@ -664,7 +684,8 @@ def print(msg: str) -> str: """ if get_flags().PRINT: - print(msg) + # No formatting, still get to stdout when --quiet is used + fire_event(PrintEvent(msg=msg)) return "" @contextmember() diff --git a/core/dbt/context/configured.py b/core/dbt/context/configured.py index c5c95bbfbcb..240d9afb843 100644 --- a/core/dbt/context/configured.py +++ b/core/dbt/context/configured.py @@ -1,15 +1,14 @@ from typing import Any, Dict, Optional -from dbt_common.context import get_invocation_context - -from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER from dbt.adapters.contracts.connection import AdapterRequiredConfig -from dbt.node_types import NodeType -from dbt.utils import MultiDict - -from dbt.context.base import contextproperty, 
contextmember, Var +from dbt.constants import DEFAULT_ENV_PLACEHOLDER +from dbt.context.base import Var, contextmember, contextproperty from dbt.context.target import TargetContext from dbt.exceptions import EnvVarMissingError, SecretEnvVarLocationError +from dbt.node_types import NodeType +from dbt.utils import MultiDict +from dbt_common.constants import SECRET_ENV_PREFIX +from dbt_common.context import get_invocation_context class ConfiguredContext(TargetContext): diff --git a/core/dbt/context/context_config.py b/core/dbt/context/context_config.py index e825a194b6c..b0664f33aba 100644 --- a/core/dbt/context/context_config.py +++ b/core/dbt/context/context_config.py @@ -1,15 +1,15 @@ from abc import abstractmethod from copy import deepcopy from dataclasses import dataclass -from typing import List, Iterator, Dict, Any, TypeVar, Generic, Optional +from typing import Any, Dict, Generic, Iterator, List, Optional, TypeVar from dbt.adapters.factory import get_config_class_by_name -from dbt.config import RuntimeConfig, Project, IsFQNResource +from dbt.config import IsFQNResource, Project, RuntimeConfig from dbt.contracts.graph.model_config import get_config_for -from dbt_common.contracts.config.base import BaseConfig, _listify -from dbt_common.exceptions import DbtInternalError from dbt.node_types import NodeType from dbt.utils import fqn_search +from dbt_common.contracts.config.base import BaseConfig, _listify +from dbt_common.exceptions import DbtInternalError @dataclass diff --git a/core/dbt/context/docs.py b/core/dbt/context/docs.py index 94f64709fc7..923e8d402b9 100644 --- a/core/dbt/context/docs.py +++ b/core/dbt/context/docs.py @@ -1,15 +1,11 @@ from typing import Any, Dict, Union -from dbt.exceptions import ( - DocTargetNotFoundError, - DocArgsError, -) from dbt.config.runtime import RuntimeConfig -from dbt.contracts.graph.manifest import Manifest -from dbt.contracts.graph.nodes import Macro, ResultNode - from dbt.context.base import contextmember from 
dbt.context.configured import SchemaYamlContext +from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.graph.nodes import Macro, ResultNode +from dbt.exceptions import DocArgsError, DocTargetNotFoundError class DocsRuntimeContext(SchemaYamlContext): diff --git a/core/dbt/context/exceptions_jinja.py b/core/dbt/context/exceptions_jinja.py index a0d7c5af339..5c5867d1fca 100644 --- a/core/dbt/context/exceptions_jinja.py +++ b/core/dbt/context/exceptions_jinja.py @@ -1,37 +1,36 @@ import functools from typing import NoReturn -from dbt_common.events.functions import warn_or_error -from dbt.events.types import JinjaLogWarning - -from dbt_common.exceptions import ( - DbtRuntimeError, - NotImplementedError, - DbtDatabaseError, - DataclassNotDictError, -) from dbt.adapters.exceptions import ( - MissingConfigError, ColumnTypeMissingError, + MissingConfigError, MissingMaterializationError, RelationWrongTypeError, ) +from dbt.adapters.exceptions.cache import CacheInconsistencyError +from dbt.events.types import JinjaLogWarning from dbt.exceptions import ( - MissingRelationError, AmbiguousAliasError, AmbiguousCatalogMatchError, CompilationError, - DependencyNotFoundError, + ContractError, DependencyError, + DependencyNotFoundError, DuplicatePatchPathError, DuplicateResourceNameError, - PropertyYMLError, - ContractError, FailFastError, - scrub_secrets, + MissingRelationError, + PropertyYMLError, env_secrets, + scrub_secrets, +) +from dbt_common.events.functions import warn_or_error +from dbt_common.exceptions import ( + DataclassNotDictError, + DbtDatabaseError, + DbtRuntimeError, + NotImplementedError, ) -from dbt.adapters.exceptions.cache import CacheInconsistencyError def warn(msg, node=None): diff --git a/core/dbt/context/macro_resolver.py b/core/dbt/context/macro_resolver.py index d897c754049..ad497b3e885 100644 --- a/core/dbt/context/macro_resolver.py +++ b/core/dbt/context/macro_resolver.py @@ -1,8 +1,9 @@ from typing import Dict, MutableMapping, Optional + 
+from dbt.clients.jinja import MacroGenerator from dbt.contracts.graph.nodes import Macro from dbt.exceptions import DuplicateMacroNameError, PackageNotFoundForMacroError from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME -from dbt.clients.jinja import MacroGenerator MacroNamespace = Dict[str, Macro] diff --git a/core/dbt/context/macros.py b/core/dbt/context/macros.py index c2442b1f4a8..954cc72c9a4 100644 --- a/core/dbt/context/macros.py +++ b/core/dbt/context/macros.py @@ -1,10 +1,9 @@ -from typing import Any, Dict, Iterable, Union, Optional, List, Iterator, Mapping, Set +from typing import Any, Dict, Iterable, Iterator, List, Mapping, Optional, Set, Union from dbt.clients.jinja import MacroGenerator, MacroStack from dbt.contracts.graph.nodes import Macro -from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME from dbt.exceptions import DuplicateMacroNameError, PackageNotFoundForMacroError - +from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME FlatNamespace = Dict[str, MacroGenerator] NamespaceMember = Union[FlatNamespace, MacroGenerator] diff --git a/core/dbt/context/manifest.py b/core/dbt/context/manifest.py index 0d95fd3b95f..264f59a61e2 100644 --- a/core/dbt/context/manifest.py +++ b/core/dbt/context/manifest.py @@ -1,12 +1,11 @@ from typing import List -from dbt.clients.jinja import MacroStack from dbt.adapters.contracts.connection import AdapterRequiredConfig -from dbt.contracts.graph.manifest import Manifest +from dbt.clients.jinja import MacroStack from dbt.context.macro_resolver import TestMacroNamespace -from .base import contextproperty - +from dbt.contracts.graph.manifest import Manifest +from .base import contextproperty from .configured import ConfiguredContext from .macros import MacroNamespace, MacroNamespaceBuilder diff --git a/core/dbt/context/providers.py b/core/dbt/context/providers.py index 90f0a508c8c..15be73b535d 100644 --- a/core/dbt/context/providers.py +++ 
b/core/dbt/context/providers.py @@ -1,86 +1,94 @@ import abc -from copy import deepcopy import os +from copy import deepcopy from typing import ( - Callable, + TYPE_CHECKING, Any, + Callable, Dict, - Optional, - Union, - List, - TypeVar, - Type, Iterable, + List, Mapping, + Optional, Tuple, - TYPE_CHECKING, + Type, + TypeVar, + Union, ) from typing_extensions import Protocol +from dbt import selected_resources from dbt.adapters.base.column import Column +from dbt.adapters.contracts.connection import AdapterResponse +from dbt.adapters.exceptions import MissingConfigError +from dbt.adapters.factory import ( + get_adapter, + get_adapter_package_names, + get_adapter_type_names, +) from dbt.artifacts.resources import NodeVersion, RefArgs -from dbt_common.clients.jinja import MacroProtocol -from dbt_common.context import get_invocation_context -from dbt.adapters.factory import get_adapter, get_adapter_package_names, get_adapter_type_names -from dbt.clients.jinja import get_rendered, MacroGenerator, MacroStack, UnitTestMacroGenerator -from dbt.config import RuntimeConfig, Project -from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER -from dbt.context.base import contextmember, contextproperty, Var +from dbt.clients.jinja import ( + MacroGenerator, + MacroStack, + UnitTestMacroGenerator, + get_rendered, +) +from dbt.config import IsFQNResource, Project, RuntimeConfig +from dbt.constants import DEFAULT_ENV_PLACEHOLDER +from dbt.context.base import Var, contextmember, contextproperty from dbt.context.configured import FQNLookup from dbt.context.context_config import ContextConfig from dbt.context.exceptions_jinja import wrapped_exports from dbt.context.macro_resolver import MacroResolver, TestMacroNamespace -from dbt.context.macros import MacroNamespaceBuilder, MacroNamespace +from dbt.context.macros import MacroNamespace, MacroNamespaceBuilder from dbt.context.manifest import ManifestContext -from dbt.adapters.contracts.connection import AdapterResponse 
-from dbt.contracts.graph.manifest import Manifest, Disabled +from dbt.contracts.graph.manifest import Disabled, Manifest +from dbt.contracts.graph.metrics import MetricReference, ResolvedMetricReference from dbt.contracts.graph.nodes import ( - Macro, + AccessType, Exposure, - SeedNode, - SourceDefinition, - Resource, + Macro, ManifestNode, - AccessType, + Resource, + SeedNode, SemanticModel, + SourceDefinition, UnitTestNode, ) -from dbt.contracts.graph.metrics import MetricReference, ResolvedMetricReference -from dbt_common.events.functions import get_metadata_vars -from dbt_common.exceptions import ( - DbtInternalError, - DbtRuntimeError, - DbtValidationError, - MacrosSourcesUnWriteableError, -) -from dbt.adapters.exceptions import MissingConfigError from dbt.exceptions import ( CompilationError, ConflictingConfigKeysError, - SecretEnvVarLocationError, + DbtReferenceError, EnvVarMissingError, InlineModelConfigError, - NumberSourceArgsError, - PersistDocsValueTypeError, LoadAgateTableNotSeedError, LoadAgateTableValueError, MacroDispatchArgError, MacroResultAlreadyLoadedError, MetricArgsError, + NumberSourceArgsError, OperationsCannotRefEphemeralNodesError, ParsingError, - RefBadContextError, + PersistDocsValueTypeError, RefArgsError, + RefBadContextError, + SecretEnvVarLocationError, TargetNotFoundError, - DbtReferenceError, ) -from dbt.config import IsFQNResource -from dbt.node_types import NodeType, ModelLanguage - +from dbt.node_types import ModelLanguage, NodeType from dbt.utils import MultiDict, args_to_dict -from dbt_common.utils import merge, AttrDict, cast_to_str -from dbt import selected_resources +from dbt_common.clients.jinja import MacroProtocol +from dbt_common.constants import SECRET_ENV_PREFIX +from dbt_common.context import get_invocation_context +from dbt_common.events.functions import get_metadata_vars +from dbt_common.exceptions import ( + DbtInternalError, + DbtRuntimeError, + DbtValidationError, + MacrosSourcesUnWriteableError, +) +from 
dbt_common.utils import AttrDict, cast_to_str, merge if TYPE_CHECKING: import agate @@ -503,6 +511,7 @@ def resolve( self.model.package_name, ) + # Raise an error if the reference target is missing if target_model is None or isinstance(target_model, Disabled): raise TargetNotFoundError( node=self.model, @@ -512,6 +521,8 @@ def resolve( target_version=target_version, disabled=isinstance(target_model, Disabled), ) + + # Raise error if trying to reference a 'private' resource outside its 'group' elif self.manifest.is_invalid_private_ref( self.model, target_model, self.config.dependencies ): @@ -521,6 +532,7 @@ def resolve( access=AccessType.Private, scope=cast_to_str(target_model.group), ) + # Or a 'protected' resource outside its project/package namespace elif self.manifest.is_invalid_protected_ref( self.model, target_model, self.config.dependencies ): @@ -530,7 +542,6 @@ def resolve( access=AccessType.Protected, scope=target_model.package_name, ) - self.validate(target_model, target_name, target_package, target_version) return self.create_relation(target_model) @@ -538,6 +549,25 @@ def create_relation(self, target_model: ManifestNode) -> RelationProxy: if target_model.is_ephemeral_model: self.model.set_cte(target_model.unique_id, None) return self.Relation.create_ephemeral_from(target_model, limit=self.resolve_limit) + elif ( + hasattr(target_model, "defer_relation") + and target_model.defer_relation + and self.config.args.defer + and ( + # User has explicitly opted to prefer defer_relation for unselected resources + ( + self.config.args.favor_state + and target_model.unique_id not in selected_resources.SELECTED_RESOURCES + ) + # Or, this node's relation does not exist in the expected target location (cache lookup) + or not get_adapter(self.config).get_relation( + target_model.database, target_model.schema, target_model.identifier + ) + ) + ): + return self.Relation.create_from( + self.config, target_model.defer_relation, limit=self.resolve_limit + ) else: return 
self.Relation.create_from(self.config, target_model, limit=self.resolve_limit) diff --git a/core/dbt/context/secret.py b/core/dbt/context/secret.py index 6de99fd5e5b..3f2641323fe 100644 --- a/core/dbt/context/secret.py +++ b/core/dbt/context/secret.py @@ -1,15 +1,12 @@ from typing import Any, Dict, Optional +from dbt.constants import DEFAULT_ENV_PLACEHOLDER, SECRET_PLACEHOLDER +from dbt.exceptions import EnvVarMissingError +from dbt_common.constants import SECRET_ENV_PREFIX from dbt_common.context import get_invocation_context from .base import BaseContext, contextmember -from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER -from dbt.exceptions import EnvVarMissingError - - -SECRET_PLACEHOLDER = "$$$DBT_SECRET_START$$${}$$$DBT_SECRET_END$$$" - class SecretContext(BaseContext): """This context is used in profiles.yml + packages.yml. It can render secret diff --git a/core/dbt/contracts/files.py b/core/dbt/contracts/files.py index 714782161cc..2c78e97f977 100644 --- a/core/dbt/contracts/files.py +++ b/core/dbt/contracts/files.py @@ -1,12 +1,12 @@ import os from dataclasses import dataclass, field +from typing import Any, Dict, List, Optional, Union from mashumaro.types import SerializableType -from typing import List, Optional, Union, Dict, Any -from dbt.constants import MAXIMUM_SEED_SIZE -from dbt_common.dataclass_schema import dbtClassMixin, StrEnum from dbt.artifacts.resources.base import FileHash +from dbt.constants import MAXIMUM_SEED_SIZE +from dbt_common.dataclass_schema import StrEnum, dbtClassMixin from .util import SourceKey @@ -139,8 +139,8 @@ def _deserialize(cls, dct: Dict[str, int]): sf = SourceFile.from_dict(dct) return sf - def __post_serialize__(self, dct): - dct = super().__post_serialize__(dct) + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) # remove empty lists to save space dct_keys = list(dct.keys()) for key in dct_keys: @@ -226,8 +226,8 @@ def 
macro_patches(self): def source_patches(self): return self.sop - def __post_serialize__(self, dct): - dct = super().__post_serialize__(dct) + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) # Remove partial parsing specific data for key in ("pp_test_index", "pp_dict"): if key in dct: diff --git a/core/dbt/contracts/graph/manifest.py b/core/dbt/contracts/graph/manifest.py index 852f4dce724..9ca11166388 100644 --- a/core/dbt/contracts/graph/manifest.py +++ b/core/dbt/contracts/graph/manifest.py @@ -1,30 +1,48 @@ import enum from collections import defaultdict from dataclasses import dataclass, field, replace -from itertools import chain, islice -from mashumaro.mixins.msgpack import DataClassMessagePackMixin +from itertools import chain from multiprocessing.synchronize import Lock from typing import ( + Any, + Callable, + ClassVar, DefaultDict, Dict, + Generic, List, - Optional, - Union, Mapping, MutableMapping, - Any, + Optional, Set, Tuple, TypeVar, - Callable, - Generic, - AbstractSet, - ClassVar, + Union, ) + from typing_extensions import Protocol -from dbt import tracking +import dbt_common.exceptions +import dbt_common.utils +from dbt import deprecations, tracking +from dbt.adapters.exceptions import ( + DuplicateMacroInPackageError, + DuplicateMaterializationNameError, +) + +# to preserve import paths +from dbt.artifacts.resources import BaseResource, DeferRelation, NodeVersion +from dbt.artifacts.resources.v1.config import NodeConfig +from dbt.artifacts.schemas.manifest import ManifestMetadata, UniqueID, WritableManifest +from dbt.contracts.files import ( + AnySourceFile, + FileHash, + FixtureSourceFile, + SchemaSourceFile, + SourceFile, +) from dbt.contracts.graph.nodes import ( + RESOURCE_CLASS_TO_NODE_CLASS, BaseNode, Documentation, Exposure, @@ -37,48 +55,33 @@ ModelNode, ResultNode, SavedQuery, + SeedNode, SemanticModel, SourceDefinition, - UnpatchedSourceDefinition, 
UnitTestDefinition, UnitTestFileFixture, - RESOURCE_CLASS_TO_NODE_CLASS, + UnpatchedSourceDefinition, ) from dbt.contracts.graph.unparsed import SourcePatch, UnparsedVersion -from dbt.flags import get_flags - -# to preserve import paths -from dbt.artifacts.resources import ( - NodeVersion, - DeferRelation, - BaseResource, -) -from dbt.artifacts.schemas.manifest import WritableManifest, ManifestMetadata, UniqueID -from dbt.contracts.files import ( - SourceFile, - SchemaSourceFile, - FileHash, - AnySourceFile, - FixtureSourceFile, -) from dbt.contracts.util import SourceKey -from dbt_common.dataclass_schema import dbtClassMixin - +from dbt.events.types import UnpinnedRefNewVersionAvailable from dbt.exceptions import ( + AmbiguousResourceNameRefError, CompilationError, DuplicateResourceNameError, - AmbiguousResourceNameRefError, ) -from dbt.adapters.exceptions import DuplicateMacroInPackageError, DuplicateMaterializationNameError -from dbt_common.helper_types import PathSet -from dbt_common.events.functions import fire_event -from dbt_common.events.contextvars import get_node_info -from dbt.events.types import MergedFromState, UnpinnedRefNewVersionAvailable -from dbt.node_types import NodeType, AccessType, REFABLE_NODE_TYPES, VERSIONED_NODE_TYPES +from dbt.flags import get_flags from dbt.mp_context import get_mp_context -import dbt_common.utils -import dbt_common.exceptions - +from dbt.node_types import ( + REFABLE_NODE_TYPES, + VERSIONED_NODE_TYPES, + AccessType, + NodeType, +) +from dbt_common.dataclass_schema import dbtClassMixin +from dbt_common.events.contextvars import get_node_info +from dbt_common.events.functions import fire_event +from dbt_common.helper_types import PathSet PackageName = str DocName = str @@ -570,11 +573,29 @@ def __lt__(self, other: object) -> bool: class CandidateList(List[M]): - def last(self) -> Optional[Macro]: + def last_candidate( + self, valid_localities: Optional[List[Locality]] = None + ) -> Optional[MacroCandidate]: + """ + Obtain 
the last (highest precedence) MacroCandidate from the CandidateList of any locality in valid_localities. + If valid_localities is not specified, return the last MacroCandidate of any locality. + """ if not self: return None self.sort() - return self[-1].macro + + if valid_localities is None: + return self[-1] + + for candidate in reversed(self): + if candidate.locality in valid_localities: + return candidate + + return None + + def last(self) -> Optional[Macro]: + last_candidate = self.last_candidate() + return last_candidate.macro if last_candidate is not None else None def _get_locality(macro: Macro, root_project_name: str, internal_packages: Set[str]) -> Locality: @@ -783,7 +804,7 @@ class ManifestStateCheck(dbtClassMixin): @dataclass -class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin): +class Manifest(MacroMethods, dbtClassMixin): """The manifest for the full graph, after parsing and during compilation.""" # These attributes are both positional and by keyword. If an attribute @@ -850,7 +871,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin): metadata={"serialize": lambda x: None, "deserialize": lambda x: None}, ) - def __pre_serialize__(self): + def __pre_serialize__(self, context: Optional[Dict] = None): # serialization won't work with anything except an empty source_patches because # tuple keys are not supported, so ensure it's empty self.source_patches = {} @@ -930,7 +951,33 @@ def find_materialization_macro_by_name( for specificity, atype in enumerate(self._get_parent_adapter_types(adapter_type)) ) ) - return candidates.last() + core_candidates = [ + candidate for candidate in candidates if candidate.locality == Locality.Core + ] + + materialization_candidate = candidates.last_candidate() + # If an imported materialization macro was found that also had a core candidate, fire a deprecation + if ( + materialization_candidate is not None + and materialization_candidate.locality == Locality.Imported + and 
core_candidates + ): + # preserve legacy behaviour - allow materialization override + if ( + get_flags().require_explicit_package_overrides_for_builtin_materializations + is False + ): + deprecations.warn( + "package-materialization-override", + package_name=materialization_candidate.macro.package_name, + materialization_name=materialization_name, + ) + else: + materialization_candidate = candidates.last_candidate( + valid_localities=[Locality.Core, Locality.Root] + ) + + return materialization_candidate.macro if materialization_candidate else None def get_resource_fqns(self) -> Mapping[str, PathSet]: resource_fqns: Dict[str, Set[Tuple[str, ...]]] = {} @@ -1421,50 +1468,36 @@ def is_invalid_protected_ref( node.package_name != target_model.package_name and restrict_package_access ) - # Called by GraphRunnableTask.defer_to_manifest - def merge_from_artifact( - self, - adapter, - other: "Manifest", - selected: AbstractSet[UniqueID], - favor_state: bool = False, - ) -> None: - """Given the selected unique IDs and a writable manifest, update this - manifest by replacing any unselected nodes with their counterpart. + # Called in GraphRunnableTask.before_run, RunTask.before_run, CloneTask.before_run + def merge_from_artifact(self, other: "Manifest") -> None: + """Update this manifest by adding the 'defer_relation' attribute to all nodes + with a counterpart in the stateful manifest used for deferral. Only non-ephemeral refable nodes are examined. 
""" refables = set(REFABLE_NODE_TYPES) - merged = set() for unique_id, node in other.nodes.items(): current = self.nodes.get(unique_id) - if current and ( - node.resource_type in refables - and not node.is_ephemeral - and unique_id not in selected - and ( - not adapter.get_relation(current.database, current.schema, current.identifier) - or favor_state - ) - ): - merged.add(unique_id) - self.nodes[unique_id] = replace(node, deferred=True) - - # for all other nodes, add 'defer_relation' - elif current and node.resource_type in refables and not node.is_ephemeral: + if current and node.resource_type in refables and not node.is_ephemeral: + assert isinstance(node.config, NodeConfig) # this makes mypy happy defer_relation = DeferRelation( - node.database, node.schema, node.alias, node.relation_name + database=node.database, + schema=node.schema, + alias=node.alias, + relation_name=node.relation_name, + resource_type=node.resource_type, + name=node.name, + description=node.description, + compiled_code=(node.compiled_code if not isinstance(node, SeedNode) else None), + meta=node.meta, + tags=node.tags, + config=node.config, ) self.nodes[unique_id] = replace(current, defer_relation=defer_relation) - # Rebuild the flat_graph, which powers the 'graph' context variable, - # now that we've deferred some nodes + # Rebuild the flat_graph, which powers the 'graph' context variable self.build_flat_graph() - # log up to 5 items - sample = list(islice(merged, 5)) - fire_event(MergedFromState(num_merged=len(merged), sample=sample)) - # Methods that were formerly in ParseResult def add_macro(self, source_file: SourceFile, macro: Macro): if macro.unique_id in self.macros: diff --git a/core/dbt/contracts/graph/metrics.py b/core/dbt/contracts/graph/metrics.py index e9bb7694000..6b1a737aec8 100644 --- a/core/dbt/contracts/graph/metrics.py +++ b/core/dbt/contracts/graph/metrics.py @@ -1,8 +1,8 @@ -from dbt.contracts.graph.manifest import Manifest, Metric -from 
dbt_semantic_interfaces.type_enums import MetricType - from typing import Any, Dict, Iterator, List +from dbt_semantic_interfaces.type_enums import MetricType + +from dbt.contracts.graph.manifest import Manifest, Metric DERIVED_METRICS = [MetricType.DERIVED, MetricType.RATIO] BASE_METRICS = [MetricType.SIMPLE, MetricType.CUMULATIVE, MetricType.CONVERSION] diff --git a/core/dbt/contracts/graph/model_config.py b/core/dbt/contracts/graph/model_config.py index 8181f8b34bd..4658bca52e7 100644 --- a/core/dbt/contracts/graph/model_config.py +++ b/core/dbt/contracts/graph/model_config.py @@ -1,22 +1,22 @@ -from dataclasses import field, dataclass -from typing import Any, List, Optional, Dict, Type, Union +from dataclasses import dataclass, field +from typing import Any, Dict, List, Optional, Type, Union from dbt.artifacts.resources import ( ExposureConfig, MetricConfig, - SavedQueryConfig, - SemanticModelConfig, + ModelConfig, NodeConfig, + SavedQueryConfig, SeedConfig, - TestConfig, + SemanticModelConfig, SnapshotConfig, SourceConfig, - ModelConfig, + TestConfig, UnitTestConfig, ) +from dbt.node_types import NodeType from dbt_common.contracts.config.base import BaseConfig from dbt_common.contracts.config.metadata import Metadata -from dbt.node_types import NodeType def metas(*metas: Metadata) -> Dict[str, Any]: diff --git a/core/dbt/contracts/graph/node_args.py b/core/dbt/contracts/graph/node_args.py index cd19252275c..60b2a2aa1f8 100644 --- a/core/dbt/contracts/graph/node_args.py +++ b/core/dbt/contracts/graph/node_args.py @@ -1,9 +1,9 @@ from dataclasses import dataclass, field from datetime import datetime -from typing import Optional, List +from typing import List, Optional from dbt.artifacts.resources import NodeVersion -from dbt.node_types import NodeType, AccessType +from dbt.node_types import AccessType, NodeType @dataclass diff --git a/core/dbt/contracts/graph/nodes.py b/core/dbt/contracts/graph/nodes.py index e1f409ff1de..4cc72327332 100644 --- 
a/core/dbt/contracts/graph/nodes.py +++ b/core/dbt/contracts/graph/nodes.py @@ -1,95 +1,91 @@ +import hashlib import os -from datetime import datetime from dataclasses import dataclass, field -import hashlib - -from mashumaro.types import SerializableType +from datetime import datetime from typing import ( - Optional, - Union, - List, - Dict, Any, + Dict, + Iterator, + List, + Literal, + Optional, Sequence, Tuple, Type, - Iterator, - Literal, + Union, get_args, ) -from dbt import deprecations -from dbt_common.contracts.constraints import ConstraintType +from mashumaro.types import SerializableType -from dbt_common.clients.system import write_file +from dbt import deprecations +from dbt.artifacts.resources import Analysis as AnalysisResource +from dbt.artifacts.resources import ( + BaseResource, + ColumnInfo, + CompiledResource, + DependsOn, + Docs, +) +from dbt.artifacts.resources import Documentation as DocumentationResource +from dbt.artifacts.resources import Exposure as ExposureResource +from dbt.artifacts.resources import FileHash +from dbt.artifacts.resources import GenericTest as GenericTestResource +from dbt.artifacts.resources import GraphResource +from dbt.artifacts.resources import Group as GroupResource +from dbt.artifacts.resources import HasRelationMetadata as HasRelationMetadataResource +from dbt.artifacts.resources import HookNode as HookNodeResource +from dbt.artifacts.resources import InjectedCTE +from dbt.artifacts.resources import Macro as MacroResource +from dbt.artifacts.resources import MacroArgument +from dbt.artifacts.resources import Metric as MetricResource +from dbt.artifacts.resources import MetricInputMeasure +from dbt.artifacts.resources import Model as ModelResource +from dbt.artifacts.resources import ( + ModelConfig, + NodeConfig, + NodeVersion, + ParsedResource, + ParsedResourceMandatory, +) +from dbt.artifacts.resources import Quoting as QuotingResource +from dbt.artifacts.resources import SavedQuery as SavedQueryResource +from 
dbt.artifacts.resources import Seed as SeedResource +from dbt.artifacts.resources import SemanticModel as SemanticModelResource +from dbt.artifacts.resources import SingularTest as SingularTestResource +from dbt.artifacts.resources import Snapshot as SnapshotResource +from dbt.artifacts.resources import SourceDefinition as SourceDefinitionResource +from dbt.artifacts.resources import SqlOperation as SqlOperationResource +from dbt.artifacts.resources import UnitTestDefinition as UnitTestDefinitionResource +from dbt.contracts.graph.model_config import EmptySnapshotConfig, UnitTestNodeConfig +from dbt.contracts.graph.node_args import ModelNodeArgs from dbt.contracts.graph.unparsed import ( HasYamlMetadata, TestDef, + UnitTestOverrides, + UnparsedColumn, UnparsedSourceDefinition, UnparsedSourceTableDefinition, - UnparsedColumn, - UnitTestOverrides, -) -from dbt.contracts.graph.model_config import ( - UnitTestNodeConfig, - EmptySnapshotConfig, ) -from dbt.contracts.graph.node_args import ModelNodeArgs -from dbt_common.events.functions import warn_or_error -from dbt.exceptions import ParsingError, ContractBreakingChangeError, ValidationError from dbt.events.types import ( - SeedIncreased, - SeedExceedsLimitSamePath, SeedExceedsLimitAndPathChanged, SeedExceedsLimitChecksumChanged, + SeedExceedsLimitSamePath, + SeedIncreased, UnversionedBreakingChange, ) -from dbt_common.events.contextvars import set_log_contextvars +from dbt.exceptions import ContractBreakingChangeError, ParsingError, ValidationError from dbt.flags import get_flags from dbt.node_types import ( - NodeType, - AccessType, REFABLE_NODE_TYPES, VERSIONED_NODE_TYPES, + AccessType, + NodeType, ) - - -from dbt.artifacts.resources import ( - BaseResource, - DependsOn, - Docs, - Exposure as ExposureResource, - MacroArgument, - Documentation as DocumentationResource, - Macro as MacroResource, - Metric as MetricResource, - NodeVersion, - Group as GroupResource, - GraphResource, - SavedQuery as SavedQueryResource, - 
SemanticModel as SemanticModelResource, - ParsedResourceMandatory, - ParsedResource, - CompiledResource, - HasRelationMetadata as HasRelationMetadataResource, - FileHash, - NodeConfig, - ColumnInfo, - InjectedCTE, - Analysis as AnalysisResource, - HookNode as HookNodeResource, - Model as ModelResource, - ModelConfig, - SqlOperation as SqlOperationResource, - Seed as SeedResource, - SingularTest as SingularTestResource, - GenericTest as GenericTestResource, - Snapshot as SnapshotResource, - Quoting as QuotingResource, - SourceDefinition as SourceDefinitionResource, - MetricInputMeasure, - UnitTestDefinition as UnitTestDefinitionResource, -) +from dbt_common.clients.system import write_file +from dbt_common.contracts.constraints import ConstraintType +from dbt_common.events.contextvars import set_log_contextvars +from dbt_common.events.functions import warn_or_error # ===================================================================== # This contains the classes for all of the nodes and node-like objects @@ -262,8 +258,8 @@ def write_node(self, project_root: str, compiled_path, compiled_code: str): def _serialize(self): return self.to_dict() - def __post_serialize__(self, dct): - dct = super().__post_serialize__(dct) + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) if "_event_status" in dct: del dct["_event_status"] return dct @@ -606,8 +602,8 @@ def same_contract(self, old, adapter_type=None) -> bool: contract_enforced_disabled = True # TODO: this avoid the circular imports but isn't ideal - from dbt.adapters.factory import get_adapter_constraint_support from dbt.adapters.base import ConstraintSupport + from dbt.adapters.factory import get_adapter_constraint_support constraint_support = get_adapter_constraint_support(adapter_type) column_constraints_exist = False @@ -877,6 +873,11 @@ def language(self): return "sql" +# @property +# def compiled_code(self): +# return None + + # 
==================================== # Singular Test node # ==================================== diff --git a/core/dbt/contracts/graph/semantic_manifest.py b/core/dbt/contracts/graph/semantic_manifest.py index e52f03a795e..cf268571425 100644 --- a/core/dbt/contracts/graph/semantic_manifest.py +++ b/core/dbt/contracts/graph/semantic_manifest.py @@ -3,7 +3,9 @@ PydanticProjectConfiguration, ) from dbt_semantic_interfaces.implementations.saved_query import PydanticSavedQuery -from dbt_semantic_interfaces.implementations.semantic_manifest import PydanticSemanticManifest +from dbt_semantic_interfaces.implementations.semantic_manifest import ( + PydanticSemanticManifest, +) from dbt_semantic_interfaces.implementations.semantic_model import PydanticSemanticModel from dbt_semantic_interfaces.implementations.time_spine_table_configuration import ( PydanticTimeSpineTableConfiguration, @@ -13,11 +15,11 @@ SemanticManifestValidator, ) +from dbt.events.types import SemanticValidationFailure +from dbt.exceptions import ParsingError from dbt_common.clients.system import write_file from dbt_common.events.base_types import EventLevel from dbt_common.events.functions import fire_event -from dbt.events.types import SemanticValidationFailure -from dbt.exceptions import ParsingError class SemanticManifest: diff --git a/core/dbt/contracts/graph/unparsed.py b/core/dbt/contracts/graph/unparsed.py index caeaa5cee85..f2fb390c69e 100644 --- a/core/dbt/contracts/graph/unparsed.py +++ b/core/dbt/contracts/graph/unparsed.py @@ -1,42 +1,44 @@ import datetime import re +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any, Dict, List, Literal, Optional, Sequence, Union +from dbt_semantic_interfaces.type_enums import ConversionCalculationType + +# trigger the PathEncoder +import dbt_common.helper_types # noqa:F401 from dbt import deprecations -from dbt.artifacts.resources import ConstantPropertyInput, Quoting -from dbt_common.contracts.config.properties import 
AdditionalPropertiesMixin -from dbt_common.contracts.util import Mergeable -from dbt_common.exceptions import DbtInternalError -from dbt_common.dataclass_schema import ( - dbtClassMixin, - StrEnum, - ExtensibleDbtClassMixin, - ValidationError, -) -from dbt.node_types import NodeType from dbt.artifacts.resources import ( + ConstantPropertyInput, Defaults, DimensionValidityParams, + Docs, ExposureType, ExternalTable, FreshnessThreshold, + MacroArgument, MaturityType, MeasureAggregationParameters, + NodeVersion, + Owner, + Quoting, UnitTestInputFixture, - UnitTestOutputFixture, UnitTestNodeVersions, + UnitTestOutputFixture, UnitTestOverrides, ) - -# trigger the PathEncoder -import dbt_common.helper_types # noqa:F401 from dbt.exceptions import ParsingError - -from dbt_semantic_interfaces.type_enums import ConversionCalculationType -from dbt.artifacts.resources import Docs, MacroArgument, NodeVersion, Owner - -from dataclasses import dataclass, field -from pathlib import Path -from typing import Optional, List, Union, Dict, Any, Sequence, Literal +from dbt.node_types import NodeType +from dbt_common.contracts.config.properties import AdditionalPropertiesMixin +from dbt_common.contracts.util import Mergeable +from dbt_common.dataclass_schema import ( + ExtensibleDbtClassMixin, + StrEnum, + ValidationError, + dbtClassMixin, +) +from dbt_common.exceptions import DbtInternalError @dataclass @@ -269,14 +271,15 @@ class UnparsedMacroUpdate(HasConfig, HasColumnProps, HasYamlMetadata): class UnparsedSourceTableDefinition(HasColumnTests, HasColumnAndTestProps): config: Dict[str, Any] = field(default_factory=dict) loaded_at_field: Optional[str] = None + loaded_at_field_present: Optional[bool] = None identifier: Optional[str] = None quoting: Quoting = field(default_factory=Quoting) freshness: Optional[FreshnessThreshold] = field(default_factory=FreshnessThreshold) external: Optional[ExternalTable] = None tags: List[str] = field(default_factory=list) - def __post_serialize__(self, 
dct): - dct = super().__post_serialize__(dct) + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) if "freshness" not in dct and self.freshness is None: dct["freshness"] = None return dct @@ -293,16 +296,28 @@ class UnparsedSourceDefinition(dbtClassMixin): quoting: Quoting = field(default_factory=Quoting) freshness: Optional[FreshnessThreshold] = field(default_factory=FreshnessThreshold) loaded_at_field: Optional[str] = None + loaded_at_field_present: Optional[bool] = None tables: List[UnparsedSourceTableDefinition] = field(default_factory=list) tags: List[str] = field(default_factory=list) config: Dict[str, Any] = field(default_factory=dict) + @classmethod + def validate(cls, data): + super(UnparsedSourceDefinition, cls).validate(data) + + if data.get("loaded_at_field", None) == "": + raise ValidationError("loaded_at_field cannot be an empty string.") + if "tables" in data: + for table in data["tables"]: + if table.get("loaded_at_field", None) == "": + raise ValidationError("loaded_at_field cannot be an empty string.") + @property def yaml_key(self) -> "str": return "sources" - def __post_serialize__(self, dct): - dct = super().__post_serialize__(dct) + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) if "freshness" not in dct and self.freshness is None: dct["freshness"] = None return dct @@ -316,6 +331,7 @@ class SourceTablePatch(dbtClassMixin): data_type: Optional[str] = None docs: Optional[Docs] = None loaded_at_field: Optional[str] = None + loaded_at_field_present: Optional[bool] = None identifier: Optional[str] = None quoting: Quoting = field(default_factory=Quoting) freshness: Optional[FreshnessThreshold] = field(default_factory=FreshnessThreshold) @@ -358,6 +374,7 @@ class SourcePatch(dbtClassMixin): quoting: Optional[Quoting] = None freshness: Optional[Optional[FreshnessThreshold]] = 
field(default_factory=FreshnessThreshold) loaded_at_field: Optional[str] = None + loaded_at_field_present: Optional[bool] = None tables: Optional[List[SourceTablePatch]] = None tags: Optional[List[str]] = None diff --git a/core/dbt/contracts/project.py b/core/dbt/contracts/project.py index 46c9742952f..b0b7179f333 100644 --- a/core/dbt/contracts/project.py +++ b/core/dbt/contracts/project.py @@ -1,20 +1,21 @@ +from dataclasses import dataclass, field +from typing import Any, ClassVar, Dict, List, Optional, Union + +from mashumaro.jsonschema.annotations import Pattern +from mashumaro.types import SerializableType +from typing_extensions import Annotated + from dbt import deprecations -from dbt.contracts.util import list_str, Identifier from dbt.adapters.contracts.connection import QueryComment -from dbt_common.helper_types import NoValue +from dbt.contracts.util import Identifier, list_str from dbt_common.contracts.util import Mergeable from dbt_common.dataclass_schema import ( - dbtClassMixin, - ValidationError, ExtensibleDbtClassMixin, + ValidationError, + dbtClassMixin, dbtMashConfig, ) -from dataclasses import dataclass, field -from typing import Optional, List, Dict, Union, Any, ClassVar -from typing_extensions import Annotated -from mashumaro.types import SerializableType -from mashumaro.jsonschema.annotations import Pattern - +from dbt_common.helper_types import NoValue DEFAULT_SEND_ANONYMOUS_USAGE_STATS = True @@ -77,6 +78,16 @@ def get_revisions(self) -> List[str]: return [str(self.revision)] +@dataclass +class PrivatePackage(Package): + private: str + provider: Optional[str] = None + revision: Optional[RawVersion] = None + warn_unpinned: Optional[bool] = field(default=None, metadata={"alias": "warn-unpinned"}) + subdirectory: Optional[str] = None + unrendered: Dict[str, Any] = field(default_factory=dict) + + @dataclass class RegistryPackage(Package): package: str @@ -91,7 +102,7 @@ def get_versions(self) -> List[str]: return [str(self.version)] 
-PackageSpec = Union[LocalPackage, TarballPackage, GitPackage, RegistryPackage] +PackageSpec = Union[LocalPackage, TarballPackage, GitPackage, RegistryPackage, PrivatePackage] @dataclass @@ -101,13 +112,26 @@ class PackageConfig(dbtClassMixin): @classmethod def validate(cls, data): for package in data.get("packages", data): + # This can happen when the target is a variable that is not filled and results in hangs + if isinstance(package, dict): + if package.get("package") == "": + raise ValidationError( + "A hub package is missing the value. It is a required property." + ) + if package.get("local") == "": + raise ValidationError( + "A local package is missing the value. It is a required property." + ) + if package.get("git") == "": + raise ValidationError( + "A git package is missing the value. It is a required property." + ) if isinstance(package, dict) and package.get("package"): if not package["version"]: raise ValidationError( f"{package['package']} is missing the version. When installing from the Hub " "package index, version is a required property" ) - if "/" not in package["package"]: raise ValidationError( f"{package['package']} was not found in the package index. 
Packages on the index " @@ -296,7 +320,6 @@ def validate(cls, data): @dataclass class ProjectFlags(ExtensibleDbtClassMixin): - allow_spaces_in_model_names: Optional[bool] = True cache_selected_only: Optional[bool] = None debug: Optional[bool] = None fail_fast: Optional[bool] = None @@ -309,7 +332,6 @@ class ProjectFlags(ExtensibleDbtClassMixin): populate_cache: Optional[bool] = None printer_width: Optional[int] = None send_anonymous_usage_stats: bool = DEFAULT_SEND_ANONYMOUS_USAGE_STATS - source_freshness_run_project_hooks: bool = False static_parser: Optional[bool] = None use_colors: Optional[bool] = None use_colors_file: Optional[bool] = None @@ -319,11 +341,17 @@ class ProjectFlags(ExtensibleDbtClassMixin): warn_error_options: Optional[Dict[str, Union[str, List[str]]]] = None write_json: Optional[bool] = None + # legacy behaviors + require_explicit_package_overrides_for_builtin_materializations: bool = True + require_resource_names_without_spaces: bool = False + source_freshness_run_project_hooks: bool = False + @property def project_only_flags(self) -> Dict[str, Any]: return { + "require_explicit_package_overrides_for_builtin_materializations": self.require_explicit_package_overrides_for_builtin_materializations, + "require_resource_names_without_spaces": self.require_resource_names_without_spaces, "source_freshness_run_project_hooks": self.source_freshness_run_project_hooks, - "allow_spaces_in_model_names": self.allow_spaces_in_model_names, } diff --git a/core/dbt/contracts/results.py b/core/dbt/contracts/results.py index c0775b10aa6..79a190087c4 100644 --- a/core/dbt/contracts/results.py +++ b/core/dbt/contracts/results.py @@ -9,50 +9,45 @@ VersionedSchema, schema_version, ) - +from dbt.artifacts.schemas.catalog import ( + CatalogArtifact, + CatalogKey, + CatalogMetadata, + CatalogResults, + CatalogTable, + ColumnMetadata, + StatsItem, + TableMetadata, +) +from dbt.artifacts.schemas.freshness import ( + FreshnessErrorEnum, + FreshnessExecutionResultArtifact, 
+ FreshnessMetadata, + FreshnessNodeOutput, + FreshnessNodeResult, + FreshnessResult, + PartialSourceFreshnessResult, + SourceFreshnessOutput, + SourceFreshnessResult, + SourceFreshnessRuntimeError, + process_freshness_result, +) from dbt.artifacts.schemas.results import ( + BaseResult, + ExecutionResult, + FreshnessStatus, + NodeResult, NodeStatus, + RunningStatus, RunStatus, TestStatus, - FreshnessStatus, - RunningStatus, TimingInfo, collect_timing_info, - BaseResult, - NodeResult, - ExecutionResult, ) - from dbt.artifacts.schemas.run import ( - RunResult, - RunResultsMetadata, RunExecutionResult, + RunResult, RunResultsArtifact, + RunResultsMetadata, process_run_result, ) - -from dbt.artifacts.schemas.freshness import ( - FreshnessErrorEnum, - FreshnessMetadata, - FreshnessResult, - FreshnessExecutionResultArtifact, - FreshnessNodeResult, - FreshnessNodeOutput, - process_freshness_result, - SourceFreshnessResult, - SourceFreshnessRuntimeError, - SourceFreshnessOutput, - PartialSourceFreshnessResult, -) - -from dbt.artifacts.schemas.catalog import ( - CatalogResults, - CatalogKey, - StatsItem, - ColumnMetadata, - TableMetadata, - CatalogTable, - CatalogMetadata, - CatalogResults, - CatalogArtifact, -) diff --git a/core/dbt/contracts/selection.py b/core/dbt/contracts/selection.py index 611338559d3..0a4d39bede7 100644 --- a/core/dbt/contracts/selection.py +++ b/core/dbt/contracts/selection.py @@ -1,7 +1,7 @@ from dataclasses import dataclass -from dbt_common.dataclass_schema import dbtClassMixin +from typing import Any, Dict, List, Union -from typing import List, Dict, Any, Union +from dbt_common.dataclass_schema import dbtClassMixin @dataclass diff --git a/core/dbt/contracts/sql.py b/core/dbt/contracts/sql.py index ec1033ef831..931290039bf 100644 --- a/core/dbt/contracts/sql.py +++ b/core/dbt/contracts/sql.py @@ -1,16 +1,13 @@ import uuid from dataclasses import dataclass, field from datetime import datetime -from typing import Optional, List, Any, Dict, Sequence 
+from typing import Any, Dict, List, Optional, Sequence -from dbt_common.dataclass_schema import dbtClassMixin - -from dbt.contracts.graph.nodes import ResultNode -from dbt.artifacts.schemas.results import TimingInfo, ExecutionResult -from dbt.artifacts.schemas.run import RunResult, RunResultsArtifact, RunExecutionResult from dbt.artifacts.schemas.base import VersionedSchema, schema_version -from dbt.logger import LogMessage - +from dbt.artifacts.schemas.results import ExecutionResult, TimingInfo +from dbt.artifacts.schemas.run import RunExecutionResult, RunResult, RunResultsArtifact +from dbt.contracts.graph.nodes import ResultNode +from dbt_common.dataclass_schema import dbtClassMixin TaskTags = Optional[Dict[str, Any]] TaskID = uuid.UUID @@ -19,12 +16,7 @@ @dataclass -class RemoteResult(VersionedSchema): - logs: List[LogMessage] - - -@dataclass -class RemoteCompileResultMixin(RemoteResult): +class RemoteCompileResultMixin(VersionedSchema): raw_code: str compiled_code: str node: ResultNode @@ -43,7 +35,7 @@ def error(self): @dataclass @schema_version("remote-execution-result", 1) -class RemoteExecutionResult(ExecutionResult, RemoteResult): +class RemoteExecutionResult(ExecutionResult): results: Sequence[RunResult] args: Dict[str, Any] = field(default_factory=dict) generated_at: datetime = field(default_factory=datetime.utcnow) @@ -61,14 +53,12 @@ def write(self, path: str): def from_local_result( cls, base: RunExecutionResult, - logs: List[LogMessage], ) -> "RemoteExecutionResult": return cls( generated_at=base.generated_at, results=base.results, elapsed_time=base.elapsed_time, args=base.args, - logs=logs, ) diff --git a/core/dbt/contracts/state.py b/core/dbt/contracts/state.py index 01a2eba958b..d65fe4d9b4f 100644 --- a/core/dbt/contracts/state.py +++ b/core/dbt/contracts/state.py @@ -1,13 +1,13 @@ from pathlib import Path from typing import Optional -from dbt.contracts.graph.manifest import Manifest from dbt.artifacts.exceptions import IncompatibleSchemaError 
-from dbt.artifacts.schemas.manifest import WritableManifest from dbt.artifacts.schemas.freshness import FreshnessExecutionResultArtifact +from dbt.artifacts.schemas.manifest import WritableManifest from dbt.artifacts.schemas.run import RunResultsArtifact -from dbt_common.events.functions import fire_event +from dbt.contracts.graph.manifest import Manifest from dbt.events.types import WarnStateTargetEqual +from dbt_common.events.functions import fire_event def load_result_state(results_path) -> Optional[RunResultsArtifact]: diff --git a/core/dbt/contracts/util.py b/core/dbt/contracts/util.py index 539744f0dc5..05157fa006f 100644 --- a/core/dbt/contracts/util.py +++ b/core/dbt/contracts/util.py @@ -1,10 +1,8 @@ -from typing import List, Any, Tuple - -from dbt_common.dataclass_schema import ValidatedStringMixin, ValidationError +from typing import Any, List, Tuple # Leave imports of `Mergeable` to preserve import paths from dbt_common.contracts.util import Mergeable # noqa:F401 - +from dbt_common.dataclass_schema import ValidatedStringMixin, ValidationError SourceKey = Tuple[str, str] diff --git a/core/dbt/deprecations.py b/core/dbt/deprecations.py index 1b011128fb8..53f95ee1e65 100644 --- a/core/dbt/deprecations.py +++ b/core/dbt/deprecations.py @@ -1,10 +1,9 @@ import abc -from typing import Optional, Set, List, Dict, ClassVar +from typing import ClassVar, Dict, List, Optional, Set import dbt.tracking - from dbt.events import types as core_types -from dbt_common.events.functions import warn_or_error, fire_event +from dbt_common.events.functions import fire_event, warn_or_error class DBTDeprecation: @@ -118,6 +117,21 @@ def show(self, *args, **kwargs) -> None: active_deprecations.add(self.name) +class PackageMaterializationOverrideDeprecation(DBTDeprecation): + _name = "package-materialization-override" + _event = "PackageMaterializationOverrideDeprecation" + + +class ResourceNamesWithSpacesDeprecation(DBTDeprecation): + _name = "resource-names-with-spaces" + _event 
= "ResourceNamesWithSpacesDeprecation" + + +class SourceFreshnessProjectHooksNotRun(DBTDeprecation): + _name = "source-freshness-project-hooks" + _event = "SourceFreshnessProjectHooksNotRun" + + def renamed_env_var(old_name: str, new_name: str): class EnvironmentVariableRenamed(DBTDeprecation): _name = f"environment-variable-renamed:{old_name}" @@ -157,6 +171,9 @@ def warn(name, *args, **kwargs): CollectFreshnessReturnSignature(), TestsConfigDeprecation(), ProjectFlagsMovedDeprecation(), + PackageMaterializationOverrideDeprecation(), + ResourceNamesWithSpacesDeprecation(), + SourceFreshnessProjectHooksNotRun(), ] deprecations: Dict[str, DBTDeprecation] = {d.name: d for d in deprecations_list} diff --git a/core/dbt/deps/base.py b/core/dbt/deps/base.py index db48526b0e0..0d6dfaf20ed 100644 --- a/core/dbt/deps/base.py +++ b/core/dbt/deps/base.py @@ -1,15 +1,15 @@ import abc -import os import functools +import os import tempfile from contextlib import contextmanager from pathlib import Path -from typing import List, Optional, Generic, TypeVar, Dict +from typing import Dict, Generic, List, Optional, TypeVar -from dbt_common.clients import system from dbt.contracts.project import ProjectPackageMetadata -from dbt_common.events.functions import fire_event from dbt.events.types import DepsSetDownloadDirectory +from dbt_common.clients import system +from dbt_common.events.functions import fire_event from dbt_common.utils.connection import connection_exception_retry DOWNLOADS_PATH = None diff --git a/core/dbt/deps/git.py b/core/dbt/deps/git.py index 0d5e74a0156..c7f76423887 100644 --- a/core/dbt/deps/git.py +++ b/core/dbt/deps/git.py @@ -1,20 +1,22 @@ import os -from typing import List, Optional, Dict +from typing import Dict, List, Optional from dbt.clients import git -from dbt_common.clients import system from dbt.config.project import PartialProject, Project from dbt.config.renderer import PackageRenderer -from dbt.contracts.project import ( - ProjectPackageMetadata, - 
GitPackage, -) +from dbt.contracts.project import GitPackage, ProjectPackageMetadata from dbt.deps.base import PinnedPackage, UnpinnedPackage, get_downloads_path -from dbt_common.exceptions import ExecutableError +from dbt.events.types import DepsScrubbedPackageName, DepsUnpinned, EnsureGitInstalled from dbt.exceptions import MultipleVersionGitDepsError -from dbt_common.events.functions import fire_event, warn_or_error, scrub_secrets, env_secrets -from dbt.events.types import EnsureGitInstalled, DepsUnpinned, DepsScrubbedPackageName from dbt.utils import md5 +from dbt_common.clients import system +from dbt_common.events.functions import ( + env_secrets, + fire_event, + scrub_secrets, + warn_or_error, +) +from dbt_common.exceptions import ExecutableError def md5sum(s: str): diff --git a/core/dbt/deps/local.py b/core/dbt/deps/local.py index 9dd76b80161..869ac0c3055 100644 --- a/core/dbt/deps/local.py +++ b/core/dbt/deps/local.py @@ -1,16 +1,13 @@ import shutil from typing import Dict -from dbt_common.clients import system -from dbt.deps.base import PinnedPackage, UnpinnedPackage -from dbt.contracts.project import ( - ProjectPackageMetadata, - LocalPackage, -) -from dbt_common.events.functions import fire_event -from dbt.events.types import DepsCreatingLocalSymlink, DepsSymlinkNotAvailable from dbt.config.project import PartialProject, Project from dbt.config.renderer import PackageRenderer +from dbt.contracts.project import LocalPackage, ProjectPackageMetadata +from dbt.deps.base import PinnedPackage, UnpinnedPackage +from dbt.events.types import DepsCreatingLocalSymlink, DepsSymlinkNotAvailable +from dbt_common.clients import system +from dbt_common.events.functions import fire_event class LocalPackageMixin: diff --git a/core/dbt/deps/registry.py b/core/dbt/deps/registry.py index 408d42bab87..8943523e3f5 100644 --- a/core/dbt/deps/registry.py +++ b/core/dbt/deps/registry.py @@ -1,19 +1,16 @@ -from typing import List, Dict +from typing import Dict, List -from 
dbt_common import semver -from dbt.flags import get_flags -from dbt.version import get_installed_version from dbt.clients import registry -from dbt.contracts.project import ( - RegistryPackageMetadata, - RegistryPackage, -) +from dbt.contracts.project import RegistryPackage, RegistryPackageMetadata from dbt.deps.base import PinnedPackage, UnpinnedPackage from dbt.exceptions import ( DependencyError, PackageNotFoundError, PackageVersionNotFoundError, ) +from dbt.flags import get_flags +from dbt.version import get_installed_version +from dbt_common import semver from dbt_common.exceptions import VersionsNotCompatibleError diff --git a/core/dbt/deps/resolver.py b/core/dbt/deps/resolver.py index 5f890109b0e..b4a0c60ef6c 100644 --- a/core/dbt/deps/resolver.py +++ b/core/dbt/deps/resolver.py @@ -1,27 +1,26 @@ from dataclasses import dataclass, field -from typing import Dict, List, NoReturn, Type, Iterator, Set, Any - -from dbt.exceptions import ( - DuplicateDependencyToRootError, - DuplicateProjectDependencyError, - MismatchedDependencyTypeError, - DbtInternalError, -) +from typing import Any, Dict, Iterator, List, NoReturn, Set, Type from dbt.config import Project from dbt.config.renderer import PackageRenderer -from dbt.deps.base import BasePackage, PinnedPackage, UnpinnedPackage -from dbt.deps.local import LocalUnpinnedPackage -from dbt.deps.tarball import TarballUnpinnedPackage -from dbt.deps.git import GitUnpinnedPackage -from dbt.deps.registry import RegistryUnpinnedPackage - from dbt.contracts.project import ( - PackageSpec, - LocalPackage, - TarballPackage, GitPackage, + LocalPackage, + PackageSpec, + PrivatePackage, RegistryPackage, + TarballPackage, +) +from dbt.deps.base import BasePackage, PinnedPackage, UnpinnedPackage +from dbt.deps.git import GitUnpinnedPackage +from dbt.deps.local import LocalUnpinnedPackage +from dbt.deps.registry import RegistryUnpinnedPackage +from dbt.deps.tarball import TarballUnpinnedPackage +from dbt.exceptions import ( + 
DependencyError, + DuplicateDependencyToRootError, + DuplicateProjectDependencyError, + MismatchedDependencyTypeError, ) @@ -76,10 +75,14 @@ def update_from(self, src: List[PackageSpec]) -> None: pkg = TarballUnpinnedPackage.from_contract(contract) elif isinstance(contract, GitPackage): pkg = GitUnpinnedPackage.from_contract(contract) + elif isinstance(contract, PrivatePackage): + raise DependencyError( + f'Cannot resolve private package {contract.private} because git provider integration is missing. Please use a "git" package instead.' + ) elif isinstance(contract, RegistryPackage): pkg = RegistryUnpinnedPackage.from_contract(contract) else: - raise DbtInternalError("Invalid package type {}".format(type(contract))) + raise DependencyError("Invalid package type {}".format(type(contract))) self.incorporate(pkg) @classmethod diff --git a/core/dbt/deps/tarball.py b/core/dbt/deps/tarball.py index 77963469e06..d7874978e2b 100644 --- a/core/dbt/deps/tarball.py +++ b/core/dbt/deps/tarball.py @@ -3,13 +3,13 @@ from pathlib import Path from typing import Dict -from dbt_common.clients import system from dbt.config.project import PartialProject from dbt.contracts.project import TarballPackage from dbt.deps.base import PinnedPackage, UnpinnedPackage, get_downloads_path -from dbt.exceptions import DependencyError, scrub_secrets, env_secrets -from dbt_common.events.functions import warn_or_error from dbt.events.types import DepsScrubbedPackageName +from dbt.exceptions import DependencyError, env_secrets, scrub_secrets +from dbt_common.clients import system +from dbt_common.events.functions import warn_or_error from dbt_common.utils.connection import connection_exception_retry diff --git a/core/dbt/docs/source/_ext/dbt_click.py b/core/dbt/docs/source/_ext/dbt_click.py index 7343cc6a110..f51de96b7f5 100644 --- a/core/dbt/docs/source/_ext/dbt_click.py +++ b/core/dbt/docs/source/_ext/dbt_click.py @@ -1,11 +1,12 @@ +import traceback +import typing as t + import click import 
click.types as click_t -import dbt.cli.option_types as dbt_t from docutils import nodes from docutils.parsers.rst import Directive -import traceback -import typing as t +import dbt.cli.option_types as dbt_t PARAM_TYPE_MAP = { click_t.BoolParamType: lambda _: "boolean", diff --git a/core/dbt/docs/source/conf.py b/core/dbt/docs/source/conf.py index d9962bbfc8b..db6364a4266 100644 --- a/core/dbt/docs/source/conf.py +++ b/core/dbt/docs/source/conf.py @@ -1,5 +1,5 @@ -import sys import os +import sys import typing as t # Configuration file for the Sphinx documentation builder. diff --git a/core/dbt/events/__init__.py b/core/dbt/events/__init__.py index 8570ea527f7..123f242cae5 100644 --- a/core/dbt/events/__init__.py +++ b/core/dbt/events/__init__.py @@ -1,8 +1,8 @@ -from typing import Dict, Any, Set +from typing import Any, Dict, Set import dbt.adapters.events.types as adapter_dbt_event_types -import dbt_common.events.types as dbt_event_types import dbt.events.types as core_dbt_event_types +import dbt_common.events.types as dbt_event_types ALL_EVENT_TYPES: Dict[str, Any] = { **dbt_event_types.__dict__, diff --git a/core/dbt/events/base_types.py b/core/dbt/events/base_types.py index 39b45e9502e..a3ae0610849 100644 --- a/core/dbt/events/base_types.py +++ b/core/dbt/events/base_types.py @@ -1,14 +1,12 @@ # Aliasing common Level classes in order to make custom, but not overly-verbose versions that have PROTO_TYPES_MODULE set to the core-specific generated types_pb2 module -from dbt_common.events.base_types import ( - BaseEvent, - DynamicLevel as CommonDyanicLevel, - TestLevel as CommonTestLevel, - DebugLevel as CommonDebugLevel, - InfoLevel as CommonInfoLevel, - WarnLevel as CommonWarnLevel, - ErrorLevel as CommonErrorLevel, -) from dbt.events import core_types_pb2 +from dbt_common.events.base_types import BaseEvent +from dbt_common.events.base_types import DebugLevel as CommonDebugLevel +from dbt_common.events.base_types import DynamicLevel as CommonDyanicLevel +from 
dbt_common.events.base_types import ErrorLevel as CommonErrorLevel +from dbt_common.events.base_types import InfoLevel as CommonInfoLevel +from dbt_common.events.base_types import TestLevel as CommonTestLevel +from dbt_common.events.base_types import WarnLevel as CommonWarnLevel class CoreBaseEvent(BaseEvent): diff --git a/core/dbt/events/core_types.proto b/core/dbt/events/core_types.proto index c7c9d00c5ae..51da44e6b0d 100644 --- a/core/dbt/events/core_types.proto +++ b/core/dbt/events/core_types.proto @@ -404,27 +404,45 @@ message ProjectFlagsMovedDeprecationMsg { } // D014 -message SpacesInModelNameDeprecation { - string model_name = 1; - string model_version = 2; - string level = 3; +message SpacesInResourceNameDeprecation { + string unique_id = 1; + string level = 2; } -message SpacesInModelNameDeprecationMsg { +message SpacesInResourceNameDeprecationMsg { CoreEventInfo info = 1; - SpacesInModelNameDeprecation data = 2; + SpacesInResourceNameDeprecation data = 2; } // D015 -message TotalModelNamesWithSpacesDeprecation { +message ResourceNamesWithSpacesDeprecation { int32 count_invalid_names = 1; bool show_debug_hint = 2; string level = 3; } -message TotalModelNamesWithSpacesDeprecationMsg { +message ResourceNamesWithSpacesDeprecationMsg { + CoreEventInfo info = 1; + ResourceNamesWithSpacesDeprecation data = 2; +} + +// D016 +message PackageMaterializationOverrideDeprecation { + string package_name = 1; + string materialization_name = 2; +} + +message PackageMaterializationOverrideDeprecationMsg { + CoreEventInfo info = 1; + PackageMaterializationOverrideDeprecation data = 2; +} + +// D017 +message SourceFreshnessProjectHooksNotRun {} + +message SourceFreshnessProjectHooksNotRunMsg { CoreEventInfo info = 1; - TotalModelNamesWithSpacesDeprecation data = 2; + SourceFreshnessProjectHooksNotRun data = 2; } // I065 diff --git a/core/dbt/events/core_types_pb2.py b/core/dbt/events/core_types_pb2.py index 32754125a38..3930e1e53ec 100644 --- 
a/core/dbt/events/core_types_pb2.py +++ b/core/dbt/events/core_types_pb2.py @@ -16,7 +16,7 @@ from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x63ore_types.proto\x12\x0bproto_types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\x99\x02\n\rCoreEventInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x0b\n\x03msg\x18\x03 \x01(\t\x12\r\n\x05level\x18\x04 \x01(\t\x12\x15\n\rinvocation_id\x18\x05 \x01(\t\x12\x0b\n\x03pid\x18\x06 \x01(\x05\x12\x0e\n\x06thread\x18\x07 \x01(\t\x12&\n\x02ts\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x05\x65xtra\x18\t \x03(\x0b\x32%.proto_types.CoreEventInfo.ExtraEntry\x12\x10\n\x08\x63\x61tegory\x18\n \x01(\t\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"V\n\x0cNodeRelation\x12\x10\n\x08\x64\x61tabase\x18\n \x01(\t\x12\x0e\n\x06schema\x18\x0b \x01(\t\x12\r\n\x05\x61lias\x18\x0c \x01(\t\x12\x15\n\rrelation_name\x18\r \x01(\t\"\x91\x02\n\x08NodeInfo\x12\x11\n\tnode_path\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x11\n\tunique_id\x18\x03 \x01(\t\x12\x15\n\rresource_type\x18\x04 \x01(\t\x12\x14\n\x0cmaterialized\x18\x05 \x01(\t\x12\x13\n\x0bnode_status\x18\x06 \x01(\t\x12\x17\n\x0fnode_started_at\x18\x07 \x01(\t\x12\x18\n\x10node_finished_at\x18\x08 \x01(\t\x12%\n\x04meta\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x30\n\rnode_relation\x18\n \x01(\x0b\x32\x19.proto_types.NodeRelation\"\x7f\n\rTimingInfoMsg\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\nstarted_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x30\n\x0c\x63ompleted_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xd1\x01\n\x0cRunResultMsg\x12\x0e\n\x06status\x18\x01 \x01(\t\x12\x0f\n\x07message\x18\x02 \x01(\t\x12/\n\x0btiming_info\x18\x03 \x03(\x0b\x32\x1a.proto_types.TimingInfoMsg\x12\x0e\n\x06thread\x18\x04 
\x01(\t\x12\x16\n\x0e\x65xecution_time\x18\x05 \x01(\x02\x12\x31\n\x10\x61\x64\x61pter_response\x18\x06 \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x14\n\x0cnum_failures\x18\x07 \x01(\x05\"\\\n\nColumnType\x12\x13\n\x0b\x63olumn_name\x18\x01 \x01(\t\x12\x1c\n\x14previous_column_type\x18\x02 \x01(\t\x12\x1b\n\x13\x63urrent_column_type\x18\x03 \x01(\t\"Y\n\x10\x43olumnConstraint\x12\x13\n\x0b\x63olumn_name\x18\x01 \x01(\t\x12\x17\n\x0f\x63onstraint_name\x18\x02 \x01(\t\x12\x17\n\x0f\x63onstraint_type\x18\x03 \x01(\t\"T\n\x0fModelConstraint\x12\x17\n\x0f\x63onstraint_name\x18\x01 \x01(\t\x12\x17\n\x0f\x63onstraint_type\x18\x02 \x01(\t\x12\x0f\n\x07\x63olumns\x18\x03 \x03(\t\"9\n\x11MainReportVersion\x12\x0f\n\x07version\x18\x01 \x01(\t\x12\x13\n\x0blog_version\x18\x02 \x01(\x05\"n\n\x14MainReportVersionMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.MainReportVersion\"r\n\x0eMainReportArgs\x12\x33\n\x04\x61rgs\x18\x01 \x03(\x0b\x32%.proto_types.MainReportArgs.ArgsEntry\x1a+\n\tArgsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"h\n\x11MainReportArgsMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.MainReportArgs\"+\n\x15MainTrackingUserState\x12\x12\n\nuser_state\x18\x01 \x01(\t\"v\n\x18MainTrackingUserStateMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.MainTrackingUserState\"5\n\x0fMergedFromState\x12\x12\n\nnum_merged\x18\x01 \x01(\x05\x12\x0e\n\x06sample\x18\x02 \x03(\t\"j\n\x12MergedFromStateMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.MergedFromState\"A\n\x14MissingProfileTarget\x12\x14\n\x0cprofile_name\x18\x01 \x01(\t\x12\x13\n\x0btarget_name\x18\x02 
\x01(\t\"t\n\x17MissingProfileTargetMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.MissingProfileTarget\"(\n\x11InvalidOptionYAML\x12\x13\n\x0boption_name\x18\x01 \x01(\t\"n\n\x14InvalidOptionYAMLMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.InvalidOptionYAML\"!\n\x12LogDbtProjectError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"p\n\x15LogDbtProjectErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.LogDbtProjectError\"3\n\x12LogDbtProfileError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\x12\x10\n\x08profiles\x18\x02 \x03(\t\"p\n\x15LogDbtProfileErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.LogDbtProfileError\"!\n\x12StarterProjectPath\x12\x0b\n\x03\x64ir\x18\x01 \x01(\t\"p\n\x15StarterProjectPathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.StarterProjectPath\"$\n\x15\x43onfigFolderDirectory\x12\x0b\n\x03\x64ir\x18\x01 \x01(\t\"v\n\x18\x43onfigFolderDirectoryMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ConfigFolderDirectory\"\'\n\x14NoSampleProfileFound\x12\x0f\n\x07\x61\x64\x61pter\x18\x01 \x01(\t\"t\n\x17NoSampleProfileFoundMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.NoSampleProfileFound\"6\n\x18ProfileWrittenWithSample\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\"|\n\x1bProfileWrittenWithSampleMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32%.proto_types.ProfileWrittenWithSample\"B\n$ProfileWrittenWithTargetTemplateYAML\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\"\x94\x01\n\'ProfileWrittenWithTargetTemplateYAMLMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12?\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x31.proto_types.ProfileWrittenWithTargetTemplateYAML\"C\n%ProfileWrittenWithProjectTemplateYAML\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\"\x96\x01\n(ProfileWrittenWithProjectTemplateYAMLMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12@\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x32.proto_types.ProfileWrittenWithProjectTemplateYAML\"\x12\n\x10SettingUpProfile\"l\n\x13SettingUpProfileMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.SettingUpProfile\"\x1c\n\x1aInvalidProfileTemplateYAML\"\x80\x01\n\x1dInvalidProfileTemplateYAMLMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.InvalidProfileTemplateYAML\"(\n\x18ProjectNameAlreadyExists\x12\x0c\n\x04name\x18\x01 \x01(\t\"|\n\x1bProjectNameAlreadyExistsMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.ProjectNameAlreadyExists\"K\n\x0eProjectCreated\x12\x14\n\x0cproject_name\x18\x01 \x01(\t\x12\x10\n\x08\x64ocs_url\x18\x02 \x01(\t\x12\x11\n\tslack_url\x18\x03 \x01(\t\"h\n\x11ProjectCreatedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.ProjectCreated\"@\n\x1aPackageRedirectDeprecation\x12\x10\n\x08old_name\x18\x01 \x01(\t\x12\x10\n\x08new_name\x18\x02 \x01(\t\"\x80\x01\n\x1dPackageRedirectDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\'.proto_types.PackageRedirectDeprecation\"\x1f\n\x1dPackageInstallPathDeprecation\"\x86\x01\n PackageInstallPathDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x38\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32*.proto_types.PackageInstallPathDeprecation\"H\n\x1b\x43onfigSourcePathDeprecation\x12\x17\n\x0f\x64\x65precated_path\x18\x01 \x01(\t\x12\x10\n\x08\x65xp_path\x18\x02 \x01(\t\"\x82\x01\n\x1e\x43onfigSourcePathDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConfigSourcePathDeprecation\"F\n\x19\x43onfigDataPathDeprecation\x12\x17\n\x0f\x64\x65precated_path\x18\x01 \x01(\t\x12\x10\n\x08\x65xp_path\x18\x02 \x01(\t\"~\n\x1c\x43onfigDataPathDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.ConfigDataPathDeprecation\".\n\x17MetricAttributesRenamed\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\"z\n\x1aMetricAttributesRenamedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.MetricAttributesRenamed\"+\n\x17\x45xposureNameDeprecation\x12\x10\n\x08\x65xposure\x18\x01 \x01(\t\"z\n\x1a\x45xposureNameDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.ExposureNameDeprecation\"^\n\x13InternalDeprecation\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06reason\x18\x02 \x01(\t\x12\x18\n\x10suggested_action\x18\x03 \x01(\t\x12\x0f\n\x07version\x18\x04 \x01(\t\"r\n\x16InternalDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.InternalDeprecation\"@\n\x1a\x45nvironmentVariableRenamed\x12\x10\n\x08old_name\x18\x01 \x01(\t\x12\x10\n\x08new_name\x18\x02 
\x01(\t\"\x80\x01\n\x1d\x45nvironmentVariableRenamedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.EnvironmentVariableRenamed\"3\n\x18\x43onfigLogPathDeprecation\x12\x17\n\x0f\x64\x65precated_path\x18\x01 \x01(\t\"|\n\x1b\x43onfigLogPathDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.ConfigLogPathDeprecation\"6\n\x1b\x43onfigTargetPathDeprecation\x12\x17\n\x0f\x64\x65precated_path\x18\x01 \x01(\t\"\x82\x01\n\x1e\x43onfigTargetPathDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConfigTargetPathDeprecation\"C\n\x16TestsConfigDeprecation\x12\x17\n\x0f\x64\x65precated_path\x18\x01 \x01(\t\x12\x10\n\x08\x65xp_path\x18\x02 \x01(\t\"x\n\x19TestsConfigDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.TestsConfigDeprecation\"\x1e\n\x1cProjectFlagsMovedDeprecation\"\x84\x01\n\x1fProjectFlagsMovedDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x37\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32).proto_types.ProjectFlagsMovedDeprecation\"X\n\x1cSpacesInModelNameDeprecation\x12\x12\n\nmodel_name\x18\x01 \x01(\t\x12\x15\n\rmodel_version\x18\x02 \x01(\t\x12\r\n\x05level\x18\x03 \x01(\t\"\x84\x01\n\x1fSpacesInModelNameDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x37\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32).proto_types.SpacesInModelNameDeprecation\"k\n$TotalModelNamesWithSpacesDeprecation\x12\x1b\n\x13\x63ount_invalid_names\x18\x01 \x01(\x05\x12\x17\n\x0fshow_debug_hint\x18\x02 \x01(\x08\x12\r\n\x05level\x18\x03 \x01(\t\"\x94\x01\n\'TotalModelNamesWithSpacesDeprecationMsg\x12(\n\x04info\x18\x01 
\x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12?\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x31.proto_types.TotalModelNamesWithSpacesDeprecation\"V\n\x0f\x44\x65precatedModel\x12\x12\n\nmodel_name\x18\x01 \x01(\t\x12\x15\n\rmodel_version\x18\x02 \x01(\t\x12\x18\n\x10\x64\x65precation_date\x18\x03 \x01(\t\"j\n\x12\x44\x65precatedModelMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.DeprecatedModel\"7\n\x12InputFileDiffError\x12\x10\n\x08\x63\x61tegory\x18\x01 \x01(\t\x12\x0f\n\x07\x66ile_id\x18\x02 \x01(\t\"p\n\x15InputFileDiffErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.InputFileDiffError\"?\n\x14InvalidValueForField\x12\x12\n\nfield_name\x18\x01 \x01(\t\x12\x13\n\x0b\x66ield_value\x18\x02 \x01(\t\"t\n\x17InvalidValueForFieldMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.InvalidValueForField\"Q\n\x11ValidationWarning\x12\x15\n\rresource_type\x18\x01 \x01(\t\x12\x12\n\nfield_name\x18\x02 \x01(\t\x12\x11\n\tnode_name\x18\x03 \x01(\t\"n\n\x14ValidationWarningMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.ValidationWarning\"!\n\x11ParsePerfInfoPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"n\n\x14ParsePerfInfoPathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.ParsePerfInfoPath\"1\n!PartialParsingErrorProcessingFile\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\"\x8e\x01\n$PartialParsingErrorProcessingFileMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12<\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32..proto_types.PartialParsingErrorProcessingFile\"\x86\x01\n\x13PartialParsingError\x12?\n\x08\x65xc_info\x18\x01 
\x03(\x0b\x32-.proto_types.PartialParsingError.ExcInfoEntry\x1a.\n\x0c\x45xcInfoEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"r\n\x16PartialParsingErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.PartialParsingError\"\x1b\n\x19PartialParsingSkipParsing\"~\n\x1cPartialParsingSkipParsingMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.PartialParsingSkipParsing\"&\n\x14UnableToPartialParse\x12\x0e\n\x06reason\x18\x01 \x01(\t\"t\n\x17UnableToPartialParseMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.UnableToPartialParse\"f\n\x12StateCheckVarsHash\x12\x10\n\x08\x63hecksum\x18\x01 \x01(\t\x12\x0c\n\x04vars\x18\x02 \x01(\t\x12\x0f\n\x07profile\x18\x03 \x01(\t\x12\x0e\n\x06target\x18\x04 \x01(\t\x12\x0f\n\x07version\x18\x05 \x01(\t\"p\n\x15StateCheckVarsHashMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.StateCheckVarsHash\"\x1a\n\x18PartialParsingNotEnabled\"|\n\x1bPartialParsingNotEnabledMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.PartialParsingNotEnabled\"C\n\x14ParsedFileLoadFailed\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x0b\n\x03\x65xc\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"t\n\x17ParsedFileLoadFailedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.ParsedFileLoadFailed\"H\n\x15PartialParsingEnabled\x12\x0f\n\x07\x64\x65leted\x18\x01 \x01(\x05\x12\r\n\x05\x61\x64\x64\x65\x64\x18\x02 \x01(\x05\x12\x0f\n\x07\x63hanged\x18\x03 \x01(\x05\"v\n\x18PartialParsingEnabledMsg\x12(\n\x04info\x18\x01 
\x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.PartialParsingEnabled\"8\n\x12PartialParsingFile\x12\x0f\n\x07\x66ile_id\x18\x01 \x01(\t\x12\x11\n\toperation\x18\x02 \x01(\t\"p\n\x15PartialParsingFileMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.PartialParsingFile\"\xaf\x01\n\x1fInvalidDisabledTargetInTestNode\x12\x1b\n\x13resource_type_title\x18\x01 \x01(\t\x12\x11\n\tunique_id\x18\x02 \x01(\t\x12\x1a\n\x12original_file_path\x18\x03 \x01(\t\x12\x13\n\x0btarget_kind\x18\x04 \x01(\t\x12\x13\n\x0btarget_name\x18\x05 \x01(\t\x12\x16\n\x0etarget_package\x18\x06 \x01(\t\"\x8a\x01\n\"InvalidDisabledTargetInTestNodeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.InvalidDisabledTargetInTestNode\"7\n\x18UnusedResourceConfigPath\x12\x1b\n\x13unused_config_paths\x18\x01 \x03(\t\"|\n\x1bUnusedResourceConfigPathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.UnusedResourceConfigPath\"3\n\rSeedIncreased\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"f\n\x10SeedIncreasedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.SeedIncreased\">\n\x18SeedExceedsLimitSamePath\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"|\n\x1bSeedExceedsLimitSamePathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.SeedExceedsLimitSamePath\"D\n\x1eSeedExceedsLimitAndPathChanged\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"\x88\x01\n!SeedExceedsLimitAndPathChangedMsg\x12(\n\x04info\x18\x01 
\x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x39\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32+.proto_types.SeedExceedsLimitAndPathChanged\"\\\n\x1fSeedExceedsLimitChecksumChanged\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x15\n\rchecksum_name\x18\x03 \x01(\t\"\x8a\x01\n\"SeedExceedsLimitChecksumChangedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.SeedExceedsLimitChecksumChanged\"%\n\x0cUnusedTables\x12\x15\n\runused_tables\x18\x01 \x03(\t\"d\n\x0fUnusedTablesMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.UnusedTables\"\x87\x01\n\x17WrongResourceSchemaFile\x12\x12\n\npatch_name\x18\x01 \x01(\t\x12\x15\n\rresource_type\x18\x02 \x01(\t\x12\x1c\n\x14plural_resource_type\x18\x03 \x01(\t\x12\x10\n\x08yaml_key\x18\x04 \x01(\t\x12\x11\n\tfile_path\x18\x05 \x01(\t\"z\n\x1aWrongResourceSchemaFileMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.WrongResourceSchemaFile\"K\n\x10NoNodeForYamlKey\x12\x12\n\npatch_name\x18\x01 \x01(\t\x12\x10\n\x08yaml_key\x18\x02 \x01(\t\x12\x11\n\tfile_path\x18\x03 \x01(\t\"l\n\x13NoNodeForYamlKeyMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.NoNodeForYamlKey\"+\n\x15MacroNotFoundForPatch\x12\x12\n\npatch_name\x18\x01 \x01(\t\"v\n\x18MacroNotFoundForPatchMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.MacroNotFoundForPatch\"\xb8\x01\n\x16NodeNotFoundOrDisabled\x12\x1a\n\x12original_file_path\x18\x01 \x01(\t\x12\x11\n\tunique_id\x18\x02 \x01(\t\x12\x1b\n\x13resource_type_title\x18\x03 \x01(\t\x12\x13\n\x0btarget_name\x18\x04 \x01(\t\x12\x13\n\x0btarget_kind\x18\x05 \x01(\t\x12\x16\n\x0etarget_package\x18\x06 
\x01(\t\x12\x10\n\x08\x64isabled\x18\x07 \x01(\t\"x\n\x19NodeNotFoundOrDisabledMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.NodeNotFoundOrDisabled\"H\n\x0fJinjaLogWarning\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0b\n\x03msg\x18\x02 \x01(\t\"j\n\x12JinjaLogWarningMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.JinjaLogWarning\"E\n\x0cJinjaLogInfo\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0b\n\x03msg\x18\x02 \x01(\t\"d\n\x0fJinjaLogInfoMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.JinjaLogInfo\"F\n\rJinjaLogDebug\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0b\n\x03msg\x18\x02 \x01(\t\"f\n\x10JinjaLogDebugMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.JinjaLogDebug\"\xae\x01\n\x1eUnpinnedRefNewVersionAvailable\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x15\n\rref_node_name\x18\x02 \x01(\t\x12\x18\n\x10ref_node_package\x18\x03 \x01(\t\x12\x18\n\x10ref_node_version\x18\x04 \x01(\t\x12\x17\n\x0fref_max_version\x18\x05 \x01(\t\"\x88\x01\n!UnpinnedRefNewVersionAvailableMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x39\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32+.proto_types.UnpinnedRefNewVersionAvailable\"\xc6\x01\n\x1cUpcomingReferenceDeprecation\x12\x12\n\nmodel_name\x18\x01 \x01(\t\x12\x19\n\x11ref_model_package\x18\x02 \x01(\t\x12\x16\n\x0eref_model_name\x18\x03 \x01(\t\x12\x19\n\x11ref_model_version\x18\x04 \x01(\t\x12 \n\x18ref_model_latest_version\x18\x05 \x01(\t\x12\"\n\x1aref_model_deprecation_date\x18\x06 \x01(\t\"\x84\x01\n\x1fUpcomingReferenceDeprecationMsg\x12(\n\x04info\x18\x01 
\x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x37\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32).proto_types.UpcomingReferenceDeprecation\"\xbd\x01\n\x13\x44\x65precatedReference\x12\x12\n\nmodel_name\x18\x01 \x01(\t\x12\x19\n\x11ref_model_package\x18\x02 \x01(\t\x12\x16\n\x0eref_model_name\x18\x03 \x01(\t\x12\x19\n\x11ref_model_version\x18\x04 \x01(\t\x12 \n\x18ref_model_latest_version\x18\x05 \x01(\t\x12\"\n\x1aref_model_deprecation_date\x18\x06 \x01(\t\"r\n\x16\x44\x65precatedReferenceMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.DeprecatedReference\"<\n$UnsupportedConstraintMaterialization\x12\x14\n\x0cmaterialized\x18\x01 \x01(\t\"\x94\x01\n\'UnsupportedConstraintMaterializationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12?\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x31.proto_types.UnsupportedConstraintMaterialization\"M\n\x14ParseInlineNodeError\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0b\n\x03\x65xc\x18\x02 \x01(\t\"t\n\x17ParseInlineNodeErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.ParseInlineNodeError\"(\n\x19SemanticValidationFailure\x12\x0b\n\x03msg\x18\x02 \x01(\t\"~\n\x1cSemanticValidationFailureMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.SemanticValidationFailure\"\x8a\x03\n\x19UnversionedBreakingChange\x12\x18\n\x10\x62reaking_changes\x18\x01 \x03(\t\x12\x12\n\nmodel_name\x18\x02 \x01(\t\x12\x17\n\x0fmodel_file_path\x18\x03 \x01(\t\x12\"\n\x1a\x63ontract_enforced_disabled\x18\x04 \x01(\x08\x12\x17\n\x0f\x63olumns_removed\x18\x05 \x03(\t\x12\x34\n\x13\x63olumn_type_changes\x18\x06 \x03(\x0b\x32\x17.proto_types.ColumnType\x12I\n\"enforced_column_constraint_removed\x18\x07 
\x03(\x0b\x32\x1d.proto_types.ColumnConstraint\x12G\n!enforced_model_constraint_removed\x18\x08 \x03(\x0b\x32\x1c.proto_types.ModelConstraint\x12\x1f\n\x17materialization_changed\x18\t \x03(\t\"~\n\x1cUnversionedBreakingChangeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.UnversionedBreakingChange\"*\n\x14WarnStateTargetEqual\x12\x12\n\nstate_path\x18\x01 \x01(\t\"t\n\x17WarnStateTargetEqualMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.WarnStateTargetEqual\"%\n\x16\x46reshnessConfigProblem\x12\x0b\n\x03msg\x18\x01 \x01(\t\"x\n\x19\x46reshnessConfigProblemMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.FreshnessConfigProblem\"/\n\x1dGitSparseCheckoutSubdirectory\x12\x0e\n\x06subdir\x18\x01 \x01(\t\"\x86\x01\n GitSparseCheckoutSubdirectoryMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x38\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32*.proto_types.GitSparseCheckoutSubdirectory\"/\n\x1bGitProgressCheckoutRevision\x12\x10\n\x08revision\x18\x01 \x01(\t\"\x82\x01\n\x1eGitProgressCheckoutRevisionMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.GitProgressCheckoutRevision\"4\n%GitProgressUpdatingExistingDependency\x12\x0b\n\x03\x64ir\x18\x01 \x01(\t\"\x96\x01\n(GitProgressUpdatingExistingDependencyMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12@\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x32.proto_types.GitProgressUpdatingExistingDependency\".\n\x1fGitProgressPullingNewDependency\x12\x0b\n\x03\x64ir\x18\x01 \x01(\t\"\x8a\x01\n\"GitProgressPullingNewDependencyMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12:\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32,.proto_types.GitProgressPullingNewDependency\"\x1d\n\x0eGitNothingToDo\x12\x0b\n\x03sha\x18\x01 \x01(\t\"h\n\x11GitNothingToDoMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.GitNothingToDo\"E\n\x1fGitProgressUpdatedCheckoutRange\x12\x11\n\tstart_sha\x18\x01 \x01(\t\x12\x0f\n\x07\x65nd_sha\x18\x02 \x01(\t\"\x8a\x01\n\"GitProgressUpdatedCheckoutRangeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.GitProgressUpdatedCheckoutRange\"*\n\x17GitProgressCheckedOutAt\x12\x0f\n\x07\x65nd_sha\x18\x01 \x01(\t\"z\n\x1aGitProgressCheckedOutAtMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.GitProgressCheckedOutAt\")\n\x1aRegistryProgressGETRequest\x12\x0b\n\x03url\x18\x01 \x01(\t\"\x80\x01\n\x1dRegistryProgressGETRequestMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.RegistryProgressGETRequest\"=\n\x1bRegistryProgressGETResponse\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x11\n\tresp_code\x18\x02 \x01(\x05\"\x82\x01\n\x1eRegistryProgressGETResponseMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.RegistryProgressGETResponse\"_\n\x1dSelectorReportInvalidSelector\x12\x17\n\x0fvalid_selectors\x18\x01 \x01(\t\x12\x13\n\x0bspec_method\x18\x02 \x01(\t\x12\x10\n\x08raw_spec\x18\x03 \x01(\t\"\x86\x01\n SelectorReportInvalidSelectorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x38\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32*.proto_types.SelectorReportInvalidSelector\"\x15\n\x13\x44\x65psNoPackagesFound\"r\n\x16\x44\x65psNoPackagesFoundMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 
.proto_types.DepsNoPackagesFound\"/\n\x17\x44\x65psStartPackageInstall\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\"z\n\x1a\x44\x65psStartPackageInstallMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.DepsStartPackageInstall\"\'\n\x0f\x44\x65psInstallInfo\x12\x14\n\x0cversion_name\x18\x01 \x01(\t\"j\n\x12\x44\x65psInstallInfoMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.DepsInstallInfo\"-\n\x13\x44\x65psUpdateAvailable\x12\x16\n\x0eversion_latest\x18\x01 \x01(\t\"r\n\x16\x44\x65psUpdateAvailableMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.DepsUpdateAvailable\"\x0e\n\x0c\x44\x65psUpToDate\"d\n\x0f\x44\x65psUpToDateMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.DepsUpToDate\",\n\x14\x44\x65psListSubdirectory\x12\x14\n\x0csubdirectory\x18\x01 \x01(\t\"t\n\x17\x44\x65psListSubdirectoryMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.DepsListSubdirectory\".\n\x1a\x44\x65psNotifyUpdatesAvailable\x12\x10\n\x08packages\x18\x01 \x03(\t\"\x80\x01\n\x1d\x44\x65psNotifyUpdatesAvailableMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.DepsNotifyUpdatesAvailable\".\n\x1fRegistryIndexProgressGETRequest\x12\x0b\n\x03url\x18\x01 \x01(\t\"\x8a\x01\n\"RegistryIndexProgressGETRequestMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.RegistryIndexProgressGETRequest\"B\n RegistryIndexProgressGETResponse\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x11\n\tresp_code\x18\x02 
\x01(\x05\"\x8c\x01\n#RegistryIndexProgressGETResponseMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12;\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32-.proto_types.RegistryIndexProgressGETResponse\"2\n\x1eRegistryResponseUnexpectedType\x12\x10\n\x08response\x18\x01 \x01(\t\"\x88\x01\n!RegistryResponseUnexpectedTypeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x39\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32+.proto_types.RegistryResponseUnexpectedType\"2\n\x1eRegistryResponseMissingTopKeys\x12\x10\n\x08response\x18\x01 \x01(\t\"\x88\x01\n!RegistryResponseMissingTopKeysMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x39\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32+.proto_types.RegistryResponseMissingTopKeys\"5\n!RegistryResponseMissingNestedKeys\x12\x10\n\x08response\x18\x01 \x01(\t\"\x8e\x01\n$RegistryResponseMissingNestedKeysMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12<\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32..proto_types.RegistryResponseMissingNestedKeys\"3\n\x1fRegistryResponseExtraNestedKeys\x12\x10\n\x08response\x18\x01 \x01(\t\"\x8a\x01\n\"RegistryResponseExtraNestedKeysMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.RegistryResponseExtraNestedKeys\"(\n\x18\x44\x65psSetDownloadDirectory\x12\x0c\n\x04path\x18\x01 \x01(\t\"|\n\x1b\x44\x65psSetDownloadDirectoryMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DepsSetDownloadDirectory\"-\n\x0c\x44\x65psUnpinned\x12\x10\n\x08revision\x18\x01 \x01(\t\x12\x0b\n\x03git\x18\x02 \x01(\t\"d\n\x0f\x44\x65psUnpinnedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.DepsUnpinned\"/\n\x1bNoNodesForSelectionCriteria\x12\x10\n\x08spec_raw\x18\x01 
\x01(\t\"\x82\x01\n\x1eNoNodesForSelectionCriteriaMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.NoNodesForSelectionCriteria\")\n\x10\x44\x65psLockUpdating\x12\x15\n\rlock_filepath\x18\x01 \x01(\t\"l\n\x13\x44\x65psLockUpdatingMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.DepsLockUpdating\"R\n\x0e\x44\x65psAddPackage\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x19\n\x11packages_filepath\x18\x03 \x01(\t\"h\n\x11\x44\x65psAddPackageMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.DepsAddPackage\"\xa7\x01\n\x19\x44\x65psFoundDuplicatePackage\x12S\n\x0fremoved_package\x18\x01 \x03(\x0b\x32:.proto_types.DepsFoundDuplicatePackage.RemovedPackageEntry\x1a\x35\n\x13RemovedPackageEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"~\n\x1c\x44\x65psFoundDuplicatePackageMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.DepsFoundDuplicatePackage\"$\n\x12\x44\x65psVersionMissing\x12\x0e\n\x06source\x18\x01 \x01(\t\"p\n\x15\x44\x65psVersionMissingMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.DepsVersionMissing\"/\n\x17\x44\x65psScrubbedPackageName\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\"z\n\x1a\x44\x65psScrubbedPackageNameMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.DepsScrubbedPackageName\"*\n\x1bRunningOperationCaughtError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"\x82\x01\n\x1eRunningOperationCaughtErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32(.proto_types.RunningOperationCaughtError\"\x11\n\x0f\x43ompileComplete\"j\n\x12\x43ompileCompleteMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.CompileComplete\"\x18\n\x16\x46reshnessCheckComplete\"x\n\x19\x46reshnessCheckCompleteMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.FreshnessCheckComplete\"\x1c\n\nSeedHeader\x12\x0e\n\x06header\x18\x01 \x01(\t\"`\n\rSeedHeaderMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.SeedHeader\"]\n\x12SQLRunnerException\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x02 \x01(\t\x12(\n\tnode_info\x18\x03 \x01(\x0b\x32\x15.proto_types.NodeInfo\"p\n\x15SQLRunnerExceptionMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.SQLRunnerException\"\xa8\x01\n\rLogTestResult\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\x12\n\nnum_models\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\x12\x14\n\x0cnum_failures\x18\x07 \x01(\x05\"f\n\x10LogTestResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.LogTestResult\"k\n\x0cLogStartLine\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\r\n\x05index\x18\x03 \x01(\x05\x12\r\n\x05total\x18\x04 \x01(\x05\"d\n\x0fLogStartLineMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.LogStartLine\"\x95\x01\n\x0eLogModelResult\x12(\n\tnode_info\x18\x01 
\x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\r\n\x05total\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\"h\n\x11LogModelResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.LogModelResult\"\x92\x02\n\x11LogSnapshotResult\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\r\n\x05total\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\x12\x34\n\x03\x63\x66g\x18\x07 \x03(\x0b\x32\'.proto_types.LogSnapshotResult.CfgEntry\x12\x16\n\x0eresult_message\x18\x08 \x01(\t\x1a*\n\x08\x43\x66gEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"n\n\x14LogSnapshotResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.LogSnapshotResult\"\xb9\x01\n\rLogSeedResult\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0e\n\x06status\x18\x02 \x01(\t\x12\x16\n\x0eresult_message\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\r\n\x05total\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\x12\x0e\n\x06schema\x18\x07 \x01(\t\x12\x10\n\x08relation\x18\x08 \x01(\t\"f\n\x10LogSeedResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.LogSeedResult\"\xad\x01\n\x12LogFreshnessResult\x12\x0e\n\x06status\x18\x01 \x01(\t\x12(\n\tnode_info\x18\x02 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\r\n\x05index\x18\x03 \x01(\x05\x12\r\n\x05total\x18\x04 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x05 \x01(\x02\x12\x13\n\x0bsource_name\x18\x06 \x01(\t\x12\x12\n\ntable_name\x18\x07 
\x01(\t\"p\n\x15LogFreshnessResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.LogFreshnessResult\"\x98\x01\n\x11LogNodeNoOpResult\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\r\n\x05total\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\"n\n\x14LogNodeNoOpResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.LogNodeNoOpResult\"\"\n\rLogCancelLine\x12\x11\n\tconn_name\x18\x01 \x01(\t\"f\n\x10LogCancelLineMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.LogCancelLine\"\x1f\n\x0f\x44\x65\x66\x61ultSelector\x12\x0c\n\x04name\x18\x01 \x01(\t\"j\n\x12\x44\x65\x66\x61ultSelectorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.DefaultSelector\"5\n\tNodeStart\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\"^\n\x0cNodeStartMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.NodeStart\"g\n\x0cNodeFinished\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12-\n\nrun_result\x18\x02 \x01(\x0b\x32\x19.proto_types.RunResultMsg\"d\n\x0fNodeFinishedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.NodeFinished\"+\n\x1bQueryCancelationUnsupported\x12\x0c\n\x04type\x18\x01 \x01(\t\"\x82\x01\n\x1eQueryCancelationUnsupportedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32(.proto_types.QueryCancelationUnsupported\"O\n\x0f\x43oncurrencyLine\x12\x13\n\x0bnum_threads\x18\x01 \x01(\x05\x12\x13\n\x0btarget_name\x18\x02 \x01(\t\x12\x12\n\nnode_count\x18\x03 \x01(\x05\"j\n\x12\x43oncurrencyLineMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.ConcurrencyLine\"E\n\x19WritingInjectedSQLForNode\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\"~\n\x1cWritingInjectedSQLForNodeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.WritingInjectedSQLForNode\"9\n\rNodeCompiling\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\"f\n\x10NodeCompilingMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NodeCompiling\"9\n\rNodeExecuting\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\"f\n\x10NodeExecutingMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NodeExecuting\"m\n\x10LogHookStartLine\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x11\n\tstatement\x18\x02 \x01(\t\x12\r\n\x05index\x18\x03 \x01(\x05\x12\r\n\x05total\x18\x04 \x01(\x05\"l\n\x13LogHookStartLineMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.LogHookStartLine\"\x93\x01\n\x0eLogHookEndLine\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x11\n\tstatement\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\r\n\x05total\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\"h\n\x11LogHookEndLineMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\x1b.proto_types.LogHookEndLine\"\x93\x01\n\x0fSkippingDetails\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x15\n\rresource_type\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\x12\x11\n\tnode_name\x18\x04 \x01(\t\x12\r\n\x05index\x18\x05 \x01(\x05\x12\r\n\x05total\x18\x06 \x01(\x05\"j\n\x12SkippingDetailsMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.SkippingDetails\"\r\n\x0bNothingToDo\"b\n\x0eNothingToDoMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.NothingToDo\",\n\x1dRunningOperationUncaughtError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"\x86\x01\n RunningOperationUncaughtErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x38\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32*.proto_types.RunningOperationUncaughtError\"\x93\x01\n\x0c\x45ndRunResult\x12*\n\x07results\x18\x01 \x03(\x0b\x32\x19.proto_types.RunResultMsg\x12\x14\n\x0c\x65lapsed_time\x18\x02 \x01(\x02\x12\x30\n\x0cgenerated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07success\x18\x04 \x01(\x08\"d\n\x0f\x45ndRunResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.EndRunResult\"\x11\n\x0fNoNodesSelected\"j\n\x12NoNodesSelectedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.NoNodesSelected\"w\n\x10\x43ommandCompleted\x12\x0f\n\x07\x63ommand\x18\x01 \x01(\t\x12\x0f\n\x07success\x18\x02 \x01(\x08\x12\x30\n\x0c\x63ompleted_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07\x65lapsed\x18\x04 \x01(\x02\"l\n\x13\x43ommandCompletedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\x1d.proto_types.CommandCompleted\"k\n\x08ShowNode\x12\x11\n\tnode_name\x18\x01 \x01(\t\x12\x0f\n\x07preview\x18\x02 \x01(\t\x12\x11\n\tis_inline\x18\x03 \x01(\x08\x12\x15\n\routput_format\x18\x04 \x01(\t\x12\x11\n\tunique_id\x18\x05 \x01(\t\"\\\n\x0bShowNodeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.ShowNode\"p\n\x0c\x43ompiledNode\x12\x11\n\tnode_name\x18\x01 \x01(\t\x12\x10\n\x08\x63ompiled\x18\x02 \x01(\t\x12\x11\n\tis_inline\x18\x03 \x01(\x08\x12\x15\n\routput_format\x18\x04 \x01(\t\x12\x11\n\tunique_id\x18\x05 \x01(\t\"d\n\x0f\x43ompiledNodeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.CompiledNode\"b\n\x17\x43\x61tchableExceptionOnRun\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0b\n\x03\x65xc\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"z\n\x1a\x43\x61tchableExceptionOnRunMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.CatchableExceptionOnRun\"_\n\x12InternalErrorOnRun\x12\x12\n\nbuild_path\x18\x01 \x01(\t\x12\x0b\n\x03\x65xc\x18\x02 \x01(\t\x12(\n\tnode_info\x18\x03 \x01(\x0b\x32\x15.proto_types.NodeInfo\"p\n\x15InternalErrorOnRunMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.InternalErrorOnRun\"u\n\x15GenericExceptionOnRun\x12\x12\n\nbuild_path\x18\x01 \x01(\t\x12\x11\n\tunique_id\x18\x02 \x01(\t\x12\x0b\n\x03\x65xc\x18\x03 \x01(\t\x12(\n\tnode_info\x18\x04 \x01(\x0b\x32\x15.proto_types.NodeInfo\"v\n\x18GenericExceptionOnRunMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.GenericExceptionOnRun\"N\n\x1aNodeConnectionReleaseError\x12\x11\n\tnode_name\x18\x01 \x01(\t\x12\x0b\n\x03\x65xc\x18\x02 
\x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"\x80\x01\n\x1dNodeConnectionReleaseErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.NodeConnectionReleaseError\"\x1f\n\nFoundStats\x12\x11\n\tstat_line\x18\x01 \x01(\t\"`\n\rFoundStatsMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.FoundStats\"\x17\n\x15MainKeyboardInterrupt\"v\n\x18MainKeyboardInterruptMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.MainKeyboardInterrupt\"#\n\x14MainEncounteredError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"t\n\x17MainEncounteredErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.MainEncounteredError\"%\n\x0eMainStackTrace\x12\x13\n\x0bstack_trace\x18\x01 \x01(\t\"h\n\x11MainStackTraceMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.MainStackTrace\"p\n\x13TimingInfoCollected\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12/\n\x0btiming_info\x18\x02 \x01(\x0b\x32\x1a.proto_types.TimingInfoMsg\"r\n\x16TimingInfoCollectedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.TimingInfoCollected\"&\n\x12LogDebugStackTrace\x12\x10\n\x08\x65xc_info\x18\x01 \x01(\t\"p\n\x15LogDebugStackTraceMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.LogDebugStackTrace\"\x1e\n\x0e\x43heckCleanPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"h\n\x11\x43heckCleanPathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CheckCleanPath\" 
\n\x10\x43onfirmCleanPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"l\n\x13\x43onfirmCleanPathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConfirmCleanPath\"\"\n\x12ProtectedCleanPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"p\n\x15ProtectedCleanPathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.ProtectedCleanPath\"\x14\n\x12\x46inishedCleanPaths\"p\n\x15\x46inishedCleanPathsMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.FinishedCleanPaths\"5\n\x0bOpenCommand\x12\x10\n\x08open_cmd\x18\x01 \x01(\t\x12\x14\n\x0cprofiles_dir\x18\x02 \x01(\t\"b\n\x0eOpenCommandMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.OpenCommand\"0\n\x0fServingDocsPort\x12\x0f\n\x07\x61\x64\x64ress\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05\"j\n\x12ServingDocsPortMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.ServingDocsPort\"%\n\x15ServingDocsAccessInfo\x12\x0c\n\x04port\x18\x01 \x01(\t\"v\n\x18ServingDocsAccessInfoMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ServingDocsAccessInfo\"\x15\n\x13ServingDocsExitInfo\"r\n\x16ServingDocsExitInfoMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.ServingDocsExitInfo\"t\n\x10RunResultWarning\x12\x15\n\rresource_type\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x0c\n\x04path\x18\x03 \x01(\t\x12(\n\tnode_info\x18\x04 \x01(\x0b\x32\x15.proto_types.NodeInfo\"l\n\x13RunResultWarningMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\x1d.proto_types.RunResultWarning\"t\n\x10RunResultFailure\x12\x15\n\rresource_type\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x0c\n\x04path\x18\x03 \x01(\t\x12(\n\tnode_info\x18\x04 \x01(\x0b\x32\x15.proto_types.NodeInfo\"l\n\x13RunResultFailureMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.RunResultFailure\"k\n\tStatsLine\x12\x30\n\x05stats\x18\x01 \x03(\x0b\x32!.proto_types.StatsLine.StatsEntry\x1a,\n\nStatsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"^\n\x0cStatsLineMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.StatsLine\"G\n\x0eRunResultError\x12\x0b\n\x03msg\x18\x01 \x01(\t\x12(\n\tnode_info\x18\x02 \x01(\x0b\x32\x15.proto_types.NodeInfo\"h\n\x11RunResultErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.RunResultError\"S\n\x17RunResultErrorNoMessage\x12\x0e\n\x06status\x18\x01 \x01(\t\x12(\n\tnode_info\x18\x02 \x01(\x0b\x32\x15.proto_types.NodeInfo\"z\n\x1aRunResultErrorNoMessageMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.RunResultErrorNoMessage\"I\n\x0fSQLCompiledPath\x12\x0c\n\x04path\x18\x01 \x01(\t\x12(\n\tnode_info\x18\x02 \x01(\x0b\x32\x15.proto_types.NodeInfo\"j\n\x12SQLCompiledPathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.SQLCompiledPath\"W\n\x14\x43heckNodeTestFailure\x12\x15\n\rrelation_name\x18\x01 \x01(\t\x12(\n\tnode_info\x18\x02 \x01(\x0b\x32\x15.proto_types.NodeInfo\"t\n\x17\x43heckNodeTestFailureMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32!.proto_types.CheckNodeTestFailure\"W\n\x0f\x45ndOfRunSummary\x12\x12\n\nnum_errors\x18\x01 \x01(\x05\x12\x14\n\x0cnum_warnings\x18\x02 \x01(\x05\x12\x1a\n\x12keyboard_interrupt\x18\x03 \x01(\x08\"j\n\x12\x45ndOfRunSummaryMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.EndOfRunSummary\"U\n\x13LogSkipBecauseError\x12\x0e\n\x06schema\x18\x01 \x01(\t\x12\x10\n\x08relation\x18\x02 \x01(\t\x12\r\n\x05index\x18\x03 \x01(\x05\x12\r\n\x05total\x18\x04 \x01(\x05\"r\n\x16LogSkipBecauseErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.LogSkipBecauseError\"\x14\n\x12\x45nsureGitInstalled\"p\n\x15\x45nsureGitInstalledMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.EnsureGitInstalled\"\x1a\n\x18\x44\x65psCreatingLocalSymlink\"|\n\x1b\x44\x65psCreatingLocalSymlinkMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DepsCreatingLocalSymlink\"\x19\n\x17\x44\x65psSymlinkNotAvailable\"z\n\x1a\x44\x65psSymlinkNotAvailableMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.DepsSymlinkNotAvailable\"\x11\n\x0f\x44isableTracking\"j\n\x12\x44isableTrackingMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.DisableTracking\"\x1e\n\x0cSendingEvent\x12\x0e\n\x06kwargs\x18\x01 \x01(\t\"d\n\x0fSendingEventMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.SendingEvent\"\x12\n\x10SendEventFailure\"l\n\x13SendEventFailureMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\x1d.proto_types.SendEventFailure\"\r\n\x0b\x46lushEvents\"b\n\x0e\x46lushEventsMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.FlushEvents\"\x14\n\x12\x46lushEventsFailure\"p\n\x15\x46lushEventsFailureMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.FlushEventsFailure\"-\n\x19TrackingInitializeFailure\x12\x10\n\x08\x65xc_info\x18\x01 \x01(\t\"~\n\x1cTrackingInitializeFailureMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.TrackingInitializeFailure\"P\n\x17RunResultWarningMessage\x12\x0b\n\x03msg\x18\x01 \x01(\t\x12(\n\tnode_info\x18\x02 \x01(\x0b\x32\x15.proto_types.NodeInfo\"z\n\x1aRunResultWarningMessageMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.RunResultWarningMessage\"\x1a\n\x0b\x44\x65\x62ugCmdOut\x12\x0b\n\x03msg\x18\x01 \x01(\t\"b\n\x0e\x44\x65\x62ugCmdOutMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.DebugCmdOut\"\x1d\n\x0e\x44\x65\x62ugCmdResult\x12\x0b\n\x03msg\x18\x01 \x01(\t\"h\n\x11\x44\x65\x62ugCmdResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.DebugCmdResult\"\x19\n\nListCmdOut\x12\x0b\n\x03msg\x18\x01 \x01(\t\"`\n\rListCmdOutMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.ListCmdOut\"\xec\x01\n\x0eResourceReport\x12\x14\n\x0c\x63ommand_name\x18\x02 \x01(\t\x12\x17\n\x0f\x63ommand_success\x18\x03 \x01(\x08\x12\x1f\n\x17\x63ommand_wall_clock_time\x18\x04 \x01(\x02\x12\x19\n\x11process_user_time\x18\x05 \x01(\x02\x12\x1b\n\x13process_kernel_time\x18\x06 
\x01(\x02\x12\x1b\n\x13process_mem_max_rss\x18\x07 \x01(\x03\x12\x19\n\x11process_in_blocks\x18\x08 \x01(\x03\x12\x1a\n\x12process_out_blocks\x18\t \x01(\x03\"h\n\x11ResourceReportMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.ResourceReportb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x63ore_types.proto\x12\x0bproto_types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\x99\x02\n\rCoreEventInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x0b\n\x03msg\x18\x03 \x01(\t\x12\r\n\x05level\x18\x04 \x01(\t\x12\x15\n\rinvocation_id\x18\x05 \x01(\t\x12\x0b\n\x03pid\x18\x06 \x01(\x05\x12\x0e\n\x06thread\x18\x07 \x01(\t\x12&\n\x02ts\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x05\x65xtra\x18\t \x03(\x0b\x32%.proto_types.CoreEventInfo.ExtraEntry\x12\x10\n\x08\x63\x61tegory\x18\n \x01(\t\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"V\n\x0cNodeRelation\x12\x10\n\x08\x64\x61tabase\x18\n \x01(\t\x12\x0e\n\x06schema\x18\x0b \x01(\t\x12\r\n\x05\x61lias\x18\x0c \x01(\t\x12\x15\n\rrelation_name\x18\r \x01(\t\"\x91\x02\n\x08NodeInfo\x12\x11\n\tnode_path\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x11\n\tunique_id\x18\x03 \x01(\t\x12\x15\n\rresource_type\x18\x04 \x01(\t\x12\x14\n\x0cmaterialized\x18\x05 \x01(\t\x12\x13\n\x0bnode_status\x18\x06 \x01(\t\x12\x17\n\x0fnode_started_at\x18\x07 \x01(\t\x12\x18\n\x10node_finished_at\x18\x08 \x01(\t\x12%\n\x04meta\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x30\n\rnode_relation\x18\n \x01(\x0b\x32\x19.proto_types.NodeRelation\"\x7f\n\rTimingInfoMsg\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\nstarted_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x30\n\x0c\x63ompleted_at\x18\x03 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xd1\x01\n\x0cRunResultMsg\x12\x0e\n\x06status\x18\x01 \x01(\t\x12\x0f\n\x07message\x18\x02 \x01(\t\x12/\n\x0btiming_info\x18\x03 \x03(\x0b\x32\x1a.proto_types.TimingInfoMsg\x12\x0e\n\x06thread\x18\x04 \x01(\t\x12\x16\n\x0e\x65xecution_time\x18\x05 \x01(\x02\x12\x31\n\x10\x61\x64\x61pter_response\x18\x06 \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x14\n\x0cnum_failures\x18\x07 \x01(\x05\"\\\n\nColumnType\x12\x13\n\x0b\x63olumn_name\x18\x01 \x01(\t\x12\x1c\n\x14previous_column_type\x18\x02 \x01(\t\x12\x1b\n\x13\x63urrent_column_type\x18\x03 \x01(\t\"Y\n\x10\x43olumnConstraint\x12\x13\n\x0b\x63olumn_name\x18\x01 \x01(\t\x12\x17\n\x0f\x63onstraint_name\x18\x02 \x01(\t\x12\x17\n\x0f\x63onstraint_type\x18\x03 \x01(\t\"T\n\x0fModelConstraint\x12\x17\n\x0f\x63onstraint_name\x18\x01 \x01(\t\x12\x17\n\x0f\x63onstraint_type\x18\x02 \x01(\t\x12\x0f\n\x07\x63olumns\x18\x03 \x03(\t\"9\n\x11MainReportVersion\x12\x0f\n\x07version\x18\x01 \x01(\t\x12\x13\n\x0blog_version\x18\x02 \x01(\x05\"n\n\x14MainReportVersionMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.MainReportVersion\"r\n\x0eMainReportArgs\x12\x33\n\x04\x61rgs\x18\x01 \x03(\x0b\x32%.proto_types.MainReportArgs.ArgsEntry\x1a+\n\tArgsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"h\n\x11MainReportArgsMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.MainReportArgs\"+\n\x15MainTrackingUserState\x12\x12\n\nuser_state\x18\x01 \x01(\t\"v\n\x18MainTrackingUserStateMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.MainTrackingUserState\"5\n\x0fMergedFromState\x12\x12\n\nnum_merged\x18\x01 \x01(\x05\x12\x0e\n\x06sample\x18\x02 \x03(\t\"j\n\x12MergedFromStateMsg\x12(\n\x04info\x18\x01 
\x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.MergedFromState\"A\n\x14MissingProfileTarget\x12\x14\n\x0cprofile_name\x18\x01 \x01(\t\x12\x13\n\x0btarget_name\x18\x02 \x01(\t\"t\n\x17MissingProfileTargetMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.MissingProfileTarget\"(\n\x11InvalidOptionYAML\x12\x13\n\x0boption_name\x18\x01 \x01(\t\"n\n\x14InvalidOptionYAMLMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.InvalidOptionYAML\"!\n\x12LogDbtProjectError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"p\n\x15LogDbtProjectErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.LogDbtProjectError\"3\n\x12LogDbtProfileError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\x12\x10\n\x08profiles\x18\x02 \x03(\t\"p\n\x15LogDbtProfileErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.LogDbtProfileError\"!\n\x12StarterProjectPath\x12\x0b\n\x03\x64ir\x18\x01 \x01(\t\"p\n\x15StarterProjectPathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.StarterProjectPath\"$\n\x15\x43onfigFolderDirectory\x12\x0b\n\x03\x64ir\x18\x01 \x01(\t\"v\n\x18\x43onfigFolderDirectoryMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ConfigFolderDirectory\"\'\n\x14NoSampleProfileFound\x12\x0f\n\x07\x61\x64\x61pter\x18\x01 \x01(\t\"t\n\x17NoSampleProfileFoundMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.NoSampleProfileFound\"6\n\x18ProfileWrittenWithSample\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 
\x01(\t\"|\n\x1bProfileWrittenWithSampleMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.ProfileWrittenWithSample\"B\n$ProfileWrittenWithTargetTemplateYAML\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\"\x94\x01\n\'ProfileWrittenWithTargetTemplateYAMLMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12?\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x31.proto_types.ProfileWrittenWithTargetTemplateYAML\"C\n%ProfileWrittenWithProjectTemplateYAML\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\"\x96\x01\n(ProfileWrittenWithProjectTemplateYAMLMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12@\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x32.proto_types.ProfileWrittenWithProjectTemplateYAML\"\x12\n\x10SettingUpProfile\"l\n\x13SettingUpProfileMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.SettingUpProfile\"\x1c\n\x1aInvalidProfileTemplateYAML\"\x80\x01\n\x1dInvalidProfileTemplateYAMLMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.InvalidProfileTemplateYAML\"(\n\x18ProjectNameAlreadyExists\x12\x0c\n\x04name\x18\x01 \x01(\t\"|\n\x1bProjectNameAlreadyExistsMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.ProjectNameAlreadyExists\"K\n\x0eProjectCreated\x12\x14\n\x0cproject_name\x18\x01 \x01(\t\x12\x10\n\x08\x64ocs_url\x18\x02 \x01(\t\x12\x11\n\tslack_url\x18\x03 \x01(\t\"h\n\x11ProjectCreatedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.ProjectCreated\"@\n\x1aPackageRedirectDeprecation\x12\x10\n\x08old_name\x18\x01 \x01(\t\x12\x10\n\x08new_name\x18\x02 
\x01(\t\"\x80\x01\n\x1dPackageRedirectDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.PackageRedirectDeprecation\"\x1f\n\x1dPackageInstallPathDeprecation\"\x86\x01\n PackageInstallPathDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x38\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32*.proto_types.PackageInstallPathDeprecation\"H\n\x1b\x43onfigSourcePathDeprecation\x12\x17\n\x0f\x64\x65precated_path\x18\x01 \x01(\t\x12\x10\n\x08\x65xp_path\x18\x02 \x01(\t\"\x82\x01\n\x1e\x43onfigSourcePathDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConfigSourcePathDeprecation\"F\n\x19\x43onfigDataPathDeprecation\x12\x17\n\x0f\x64\x65precated_path\x18\x01 \x01(\t\x12\x10\n\x08\x65xp_path\x18\x02 \x01(\t\"~\n\x1c\x43onfigDataPathDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.ConfigDataPathDeprecation\".\n\x17MetricAttributesRenamed\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\"z\n\x1aMetricAttributesRenamedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.MetricAttributesRenamed\"+\n\x17\x45xposureNameDeprecation\x12\x10\n\x08\x65xposure\x18\x01 \x01(\t\"z\n\x1a\x45xposureNameDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.ExposureNameDeprecation\"^\n\x13InternalDeprecation\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06reason\x18\x02 \x01(\t\x12\x18\n\x10suggested_action\x18\x03 \x01(\t\x12\x0f\n\x07version\x18\x04 \x01(\t\"r\n\x16InternalDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 
.proto_types.InternalDeprecation\"@\n\x1a\x45nvironmentVariableRenamed\x12\x10\n\x08old_name\x18\x01 \x01(\t\x12\x10\n\x08new_name\x18\x02 \x01(\t\"\x80\x01\n\x1d\x45nvironmentVariableRenamedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.EnvironmentVariableRenamed\"3\n\x18\x43onfigLogPathDeprecation\x12\x17\n\x0f\x64\x65precated_path\x18\x01 \x01(\t\"|\n\x1b\x43onfigLogPathDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.ConfigLogPathDeprecation\"6\n\x1b\x43onfigTargetPathDeprecation\x12\x17\n\x0f\x64\x65precated_path\x18\x01 \x01(\t\"\x82\x01\n\x1e\x43onfigTargetPathDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConfigTargetPathDeprecation\"C\n\x16TestsConfigDeprecation\x12\x17\n\x0f\x64\x65precated_path\x18\x01 \x01(\t\x12\x10\n\x08\x65xp_path\x18\x02 \x01(\t\"x\n\x19TestsConfigDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.TestsConfigDeprecation\"\x1e\n\x1cProjectFlagsMovedDeprecation\"\x84\x01\n\x1fProjectFlagsMovedDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x37\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32).proto_types.ProjectFlagsMovedDeprecation\"C\n\x1fSpacesInResourceNameDeprecation\x12\x11\n\tunique_id\x18\x01 \x01(\t\x12\r\n\x05level\x18\x02 \x01(\t\"\x8a\x01\n\"SpacesInResourceNameDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.SpacesInResourceNameDeprecation\"i\n\"ResourceNamesWithSpacesDeprecation\x12\x1b\n\x13\x63ount_invalid_names\x18\x01 \x01(\x05\x12\x17\n\x0fshow_debug_hint\x18\x02 \x01(\x08\x12\r\n\x05level\x18\x03 
\x01(\t\"\x90\x01\n%ResourceNamesWithSpacesDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12=\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32/.proto_types.ResourceNamesWithSpacesDeprecation\"_\n)PackageMaterializationOverrideDeprecation\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x1c\n\x14materialization_name\x18\x02 \x01(\t\"\x9e\x01\n,PackageMaterializationOverrideDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x44\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x36.proto_types.PackageMaterializationOverrideDeprecation\"#\n!SourceFreshnessProjectHooksNotRun\"\x8e\x01\n$SourceFreshnessProjectHooksNotRunMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12<\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32..proto_types.SourceFreshnessProjectHooksNotRun\"V\n\x0f\x44\x65precatedModel\x12\x12\n\nmodel_name\x18\x01 \x01(\t\x12\x15\n\rmodel_version\x18\x02 \x01(\t\x12\x18\n\x10\x64\x65precation_date\x18\x03 \x01(\t\"j\n\x12\x44\x65precatedModelMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.DeprecatedModel\"7\n\x12InputFileDiffError\x12\x10\n\x08\x63\x61tegory\x18\x01 \x01(\t\x12\x0f\n\x07\x66ile_id\x18\x02 \x01(\t\"p\n\x15InputFileDiffErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.InputFileDiffError\"?\n\x14InvalidValueForField\x12\x12\n\nfield_name\x18\x01 \x01(\t\x12\x13\n\x0b\x66ield_value\x18\x02 \x01(\t\"t\n\x17InvalidValueForFieldMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.InvalidValueForField\"Q\n\x11ValidationWarning\x12\x15\n\rresource_type\x18\x01 \x01(\t\x12\x12\n\nfield_name\x18\x02 \x01(\t\x12\x11\n\tnode_name\x18\x03 \x01(\t\"n\n\x14ValidationWarningMsg\x12(\n\x04info\x18\x01 
\x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.ValidationWarning\"!\n\x11ParsePerfInfoPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"n\n\x14ParsePerfInfoPathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.ParsePerfInfoPath\"1\n!PartialParsingErrorProcessingFile\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\"\x8e\x01\n$PartialParsingErrorProcessingFileMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12<\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32..proto_types.PartialParsingErrorProcessingFile\"\x86\x01\n\x13PartialParsingError\x12?\n\x08\x65xc_info\x18\x01 \x03(\x0b\x32-.proto_types.PartialParsingError.ExcInfoEntry\x1a.\n\x0c\x45xcInfoEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"r\n\x16PartialParsingErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.PartialParsingError\"\x1b\n\x19PartialParsingSkipParsing\"~\n\x1cPartialParsingSkipParsingMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.PartialParsingSkipParsing\"&\n\x14UnableToPartialParse\x12\x0e\n\x06reason\x18\x01 \x01(\t\"t\n\x17UnableToPartialParseMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.UnableToPartialParse\"f\n\x12StateCheckVarsHash\x12\x10\n\x08\x63hecksum\x18\x01 \x01(\t\x12\x0c\n\x04vars\x18\x02 \x01(\t\x12\x0f\n\x07profile\x18\x03 \x01(\t\x12\x0e\n\x06target\x18\x04 \x01(\t\x12\x0f\n\x07version\x18\x05 \x01(\t\"p\n\x15StateCheckVarsHashMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.StateCheckVarsHash\"\x1a\n\x18PartialParsingNotEnabled\"|\n\x1bPartialParsingNotEnabledMsg\x12(\n\x04info\x18\x01 
\x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.PartialParsingNotEnabled\"C\n\x14ParsedFileLoadFailed\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x0b\n\x03\x65xc\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"t\n\x17ParsedFileLoadFailedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.ParsedFileLoadFailed\"H\n\x15PartialParsingEnabled\x12\x0f\n\x07\x64\x65leted\x18\x01 \x01(\x05\x12\r\n\x05\x61\x64\x64\x65\x64\x18\x02 \x01(\x05\x12\x0f\n\x07\x63hanged\x18\x03 \x01(\x05\"v\n\x18PartialParsingEnabledMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.PartialParsingEnabled\"8\n\x12PartialParsingFile\x12\x0f\n\x07\x66ile_id\x18\x01 \x01(\t\x12\x11\n\toperation\x18\x02 \x01(\t\"p\n\x15PartialParsingFileMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.PartialParsingFile\"\xaf\x01\n\x1fInvalidDisabledTargetInTestNode\x12\x1b\n\x13resource_type_title\x18\x01 \x01(\t\x12\x11\n\tunique_id\x18\x02 \x01(\t\x12\x1a\n\x12original_file_path\x18\x03 \x01(\t\x12\x13\n\x0btarget_kind\x18\x04 \x01(\t\x12\x13\n\x0btarget_name\x18\x05 \x01(\t\x12\x16\n\x0etarget_package\x18\x06 \x01(\t\"\x8a\x01\n\"InvalidDisabledTargetInTestNodeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.InvalidDisabledTargetInTestNode\"7\n\x18UnusedResourceConfigPath\x12\x1b\n\x13unused_config_paths\x18\x01 \x03(\t\"|\n\x1bUnusedResourceConfigPathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.UnusedResourceConfigPath\"3\n\rSeedIncreased\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"f\n\x10SeedIncreasedMsg\x12(\n\x04info\x18\x01 
\x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.SeedIncreased\">\n\x18SeedExceedsLimitSamePath\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"|\n\x1bSeedExceedsLimitSamePathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.SeedExceedsLimitSamePath\"D\n\x1eSeedExceedsLimitAndPathChanged\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"\x88\x01\n!SeedExceedsLimitAndPathChangedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x39\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32+.proto_types.SeedExceedsLimitAndPathChanged\"\\\n\x1fSeedExceedsLimitChecksumChanged\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x15\n\rchecksum_name\x18\x03 \x01(\t\"\x8a\x01\n\"SeedExceedsLimitChecksumChangedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.SeedExceedsLimitChecksumChanged\"%\n\x0cUnusedTables\x12\x15\n\runused_tables\x18\x01 \x03(\t\"d\n\x0fUnusedTablesMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.UnusedTables\"\x87\x01\n\x17WrongResourceSchemaFile\x12\x12\n\npatch_name\x18\x01 \x01(\t\x12\x15\n\rresource_type\x18\x02 \x01(\t\x12\x1c\n\x14plural_resource_type\x18\x03 \x01(\t\x12\x10\n\x08yaml_key\x18\x04 \x01(\t\x12\x11\n\tfile_path\x18\x05 \x01(\t\"z\n\x1aWrongResourceSchemaFileMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.WrongResourceSchemaFile\"K\n\x10NoNodeForYamlKey\x12\x12\n\npatch_name\x18\x01 \x01(\t\x12\x10\n\x08yaml_key\x18\x02 \x01(\t\x12\x11\n\tfile_path\x18\x03 \x01(\t\"l\n\x13NoNodeForYamlKeyMsg\x12(\n\x04info\x18\x01 
\x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.NoNodeForYamlKey\"+\n\x15MacroNotFoundForPatch\x12\x12\n\npatch_name\x18\x01 \x01(\t\"v\n\x18MacroNotFoundForPatchMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.MacroNotFoundForPatch\"\xb8\x01\n\x16NodeNotFoundOrDisabled\x12\x1a\n\x12original_file_path\x18\x01 \x01(\t\x12\x11\n\tunique_id\x18\x02 \x01(\t\x12\x1b\n\x13resource_type_title\x18\x03 \x01(\t\x12\x13\n\x0btarget_name\x18\x04 \x01(\t\x12\x13\n\x0btarget_kind\x18\x05 \x01(\t\x12\x16\n\x0etarget_package\x18\x06 \x01(\t\x12\x10\n\x08\x64isabled\x18\x07 \x01(\t\"x\n\x19NodeNotFoundOrDisabledMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.NodeNotFoundOrDisabled\"H\n\x0fJinjaLogWarning\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0b\n\x03msg\x18\x02 \x01(\t\"j\n\x12JinjaLogWarningMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.JinjaLogWarning\"E\n\x0cJinjaLogInfo\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0b\n\x03msg\x18\x02 \x01(\t\"d\n\x0fJinjaLogInfoMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.JinjaLogInfo\"F\n\rJinjaLogDebug\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0b\n\x03msg\x18\x02 \x01(\t\"f\n\x10JinjaLogDebugMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.JinjaLogDebug\"\xae\x01\n\x1eUnpinnedRefNewVersionAvailable\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x15\n\rref_node_name\x18\x02 \x01(\t\x12\x18\n\x10ref_node_package\x18\x03 \x01(\t\x12\x18\n\x10ref_node_version\x18\x04 
\x01(\t\x12\x17\n\x0fref_max_version\x18\x05 \x01(\t\"\x88\x01\n!UnpinnedRefNewVersionAvailableMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x39\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32+.proto_types.UnpinnedRefNewVersionAvailable\"\xc6\x01\n\x1cUpcomingReferenceDeprecation\x12\x12\n\nmodel_name\x18\x01 \x01(\t\x12\x19\n\x11ref_model_package\x18\x02 \x01(\t\x12\x16\n\x0eref_model_name\x18\x03 \x01(\t\x12\x19\n\x11ref_model_version\x18\x04 \x01(\t\x12 \n\x18ref_model_latest_version\x18\x05 \x01(\t\x12\"\n\x1aref_model_deprecation_date\x18\x06 \x01(\t\"\x84\x01\n\x1fUpcomingReferenceDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x37\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32).proto_types.UpcomingReferenceDeprecation\"\xbd\x01\n\x13\x44\x65precatedReference\x12\x12\n\nmodel_name\x18\x01 \x01(\t\x12\x19\n\x11ref_model_package\x18\x02 \x01(\t\x12\x16\n\x0eref_model_name\x18\x03 \x01(\t\x12\x19\n\x11ref_model_version\x18\x04 \x01(\t\x12 \n\x18ref_model_latest_version\x18\x05 \x01(\t\x12\"\n\x1aref_model_deprecation_date\x18\x06 \x01(\t\"r\n\x16\x44\x65precatedReferenceMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.DeprecatedReference\"<\n$UnsupportedConstraintMaterialization\x12\x14\n\x0cmaterialized\x18\x01 \x01(\t\"\x94\x01\n\'UnsupportedConstraintMaterializationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12?\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x31.proto_types.UnsupportedConstraintMaterialization\"M\n\x14ParseInlineNodeError\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0b\n\x03\x65xc\x18\x02 \x01(\t\"t\n\x17ParseInlineNodeErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.ParseInlineNodeError\"(\n\x19SemanticValidationFailure\x12\x0b\n\x03msg\x18\x02 
\x01(\t\"~\n\x1cSemanticValidationFailureMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.SemanticValidationFailure\"\x8a\x03\n\x19UnversionedBreakingChange\x12\x18\n\x10\x62reaking_changes\x18\x01 \x03(\t\x12\x12\n\nmodel_name\x18\x02 \x01(\t\x12\x17\n\x0fmodel_file_path\x18\x03 \x01(\t\x12\"\n\x1a\x63ontract_enforced_disabled\x18\x04 \x01(\x08\x12\x17\n\x0f\x63olumns_removed\x18\x05 \x03(\t\x12\x34\n\x13\x63olumn_type_changes\x18\x06 \x03(\x0b\x32\x17.proto_types.ColumnType\x12I\n\"enforced_column_constraint_removed\x18\x07 \x03(\x0b\x32\x1d.proto_types.ColumnConstraint\x12G\n!enforced_model_constraint_removed\x18\x08 \x03(\x0b\x32\x1c.proto_types.ModelConstraint\x12\x1f\n\x17materialization_changed\x18\t \x03(\t\"~\n\x1cUnversionedBreakingChangeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.UnversionedBreakingChange\"*\n\x14WarnStateTargetEqual\x12\x12\n\nstate_path\x18\x01 \x01(\t\"t\n\x17WarnStateTargetEqualMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.WarnStateTargetEqual\"%\n\x16\x46reshnessConfigProblem\x12\x0b\n\x03msg\x18\x01 \x01(\t\"x\n\x19\x46reshnessConfigProblemMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.FreshnessConfigProblem\"/\n\x1dGitSparseCheckoutSubdirectory\x12\x0e\n\x06subdir\x18\x01 \x01(\t\"\x86\x01\n GitSparseCheckoutSubdirectoryMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x38\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32*.proto_types.GitSparseCheckoutSubdirectory\"/\n\x1bGitProgressCheckoutRevision\x12\x10\n\x08revision\x18\x01 \x01(\t\"\x82\x01\n\x1eGitProgressCheckoutRevisionMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32(.proto_types.GitProgressCheckoutRevision\"4\n%GitProgressUpdatingExistingDependency\x12\x0b\n\x03\x64ir\x18\x01 \x01(\t\"\x96\x01\n(GitProgressUpdatingExistingDependencyMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12@\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x32.proto_types.GitProgressUpdatingExistingDependency\".\n\x1fGitProgressPullingNewDependency\x12\x0b\n\x03\x64ir\x18\x01 \x01(\t\"\x8a\x01\n\"GitProgressPullingNewDependencyMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.GitProgressPullingNewDependency\"\x1d\n\x0eGitNothingToDo\x12\x0b\n\x03sha\x18\x01 \x01(\t\"h\n\x11GitNothingToDoMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.GitNothingToDo\"E\n\x1fGitProgressUpdatedCheckoutRange\x12\x11\n\tstart_sha\x18\x01 \x01(\t\x12\x0f\n\x07\x65nd_sha\x18\x02 \x01(\t\"\x8a\x01\n\"GitProgressUpdatedCheckoutRangeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.GitProgressUpdatedCheckoutRange\"*\n\x17GitProgressCheckedOutAt\x12\x0f\n\x07\x65nd_sha\x18\x01 \x01(\t\"z\n\x1aGitProgressCheckedOutAtMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.GitProgressCheckedOutAt\")\n\x1aRegistryProgressGETRequest\x12\x0b\n\x03url\x18\x01 \x01(\t\"\x80\x01\n\x1dRegistryProgressGETRequestMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.RegistryProgressGETRequest\"=\n\x1bRegistryProgressGETResponse\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x11\n\tresp_code\x18\x02 \x01(\x05\"\x82\x01\n\x1eRegistryProgressGETResponseMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32(.proto_types.RegistryProgressGETResponse\"_\n\x1dSelectorReportInvalidSelector\x12\x17\n\x0fvalid_selectors\x18\x01 \x01(\t\x12\x13\n\x0bspec_method\x18\x02 \x01(\t\x12\x10\n\x08raw_spec\x18\x03 \x01(\t\"\x86\x01\n SelectorReportInvalidSelectorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x38\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32*.proto_types.SelectorReportInvalidSelector\"\x15\n\x13\x44\x65psNoPackagesFound\"r\n\x16\x44\x65psNoPackagesFoundMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.DepsNoPackagesFound\"/\n\x17\x44\x65psStartPackageInstall\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\"z\n\x1a\x44\x65psStartPackageInstallMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.DepsStartPackageInstall\"\'\n\x0f\x44\x65psInstallInfo\x12\x14\n\x0cversion_name\x18\x01 \x01(\t\"j\n\x12\x44\x65psInstallInfoMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.DepsInstallInfo\"-\n\x13\x44\x65psUpdateAvailable\x12\x16\n\x0eversion_latest\x18\x01 \x01(\t\"r\n\x16\x44\x65psUpdateAvailableMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.DepsUpdateAvailable\"\x0e\n\x0c\x44\x65psUpToDate\"d\n\x0f\x44\x65psUpToDateMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.DepsUpToDate\",\n\x14\x44\x65psListSubdirectory\x12\x14\n\x0csubdirectory\x18\x01 \x01(\t\"t\n\x17\x44\x65psListSubdirectoryMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.DepsListSubdirectory\".\n\x1a\x44\x65psNotifyUpdatesAvailable\x12\x10\n\x08packages\x18\x01 
\x03(\t\"\x80\x01\n\x1d\x44\x65psNotifyUpdatesAvailableMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.DepsNotifyUpdatesAvailable\".\n\x1fRegistryIndexProgressGETRequest\x12\x0b\n\x03url\x18\x01 \x01(\t\"\x8a\x01\n\"RegistryIndexProgressGETRequestMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.RegistryIndexProgressGETRequest\"B\n RegistryIndexProgressGETResponse\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x11\n\tresp_code\x18\x02 \x01(\x05\"\x8c\x01\n#RegistryIndexProgressGETResponseMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12;\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32-.proto_types.RegistryIndexProgressGETResponse\"2\n\x1eRegistryResponseUnexpectedType\x12\x10\n\x08response\x18\x01 \x01(\t\"\x88\x01\n!RegistryResponseUnexpectedTypeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x39\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32+.proto_types.RegistryResponseUnexpectedType\"2\n\x1eRegistryResponseMissingTopKeys\x12\x10\n\x08response\x18\x01 \x01(\t\"\x88\x01\n!RegistryResponseMissingTopKeysMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x39\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32+.proto_types.RegistryResponseMissingTopKeys\"5\n!RegistryResponseMissingNestedKeys\x12\x10\n\x08response\x18\x01 \x01(\t\"\x8e\x01\n$RegistryResponseMissingNestedKeysMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12<\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32..proto_types.RegistryResponseMissingNestedKeys\"3\n\x1fRegistryResponseExtraNestedKeys\x12\x10\n\x08response\x18\x01 \x01(\t\"\x8a\x01\n\"RegistryResponseExtraNestedKeysMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.RegistryResponseExtraNestedKeys\"(\n\x18\x44\x65psSetDownloadDirectory\x12\x0c\n\x04path\x18\x01 
\x01(\t\"|\n\x1b\x44\x65psSetDownloadDirectoryMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DepsSetDownloadDirectory\"-\n\x0c\x44\x65psUnpinned\x12\x10\n\x08revision\x18\x01 \x01(\t\x12\x0b\n\x03git\x18\x02 \x01(\t\"d\n\x0f\x44\x65psUnpinnedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.DepsUnpinned\"/\n\x1bNoNodesForSelectionCriteria\x12\x10\n\x08spec_raw\x18\x01 \x01(\t\"\x82\x01\n\x1eNoNodesForSelectionCriteriaMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.NoNodesForSelectionCriteria\")\n\x10\x44\x65psLockUpdating\x12\x15\n\rlock_filepath\x18\x01 \x01(\t\"l\n\x13\x44\x65psLockUpdatingMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.DepsLockUpdating\"R\n\x0e\x44\x65psAddPackage\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x19\n\x11packages_filepath\x18\x03 \x01(\t\"h\n\x11\x44\x65psAddPackageMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.DepsAddPackage\"\xa7\x01\n\x19\x44\x65psFoundDuplicatePackage\x12S\n\x0fremoved_package\x18\x01 \x03(\x0b\x32:.proto_types.DepsFoundDuplicatePackage.RemovedPackageEntry\x1a\x35\n\x13RemovedPackageEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"~\n\x1c\x44\x65psFoundDuplicatePackageMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.DepsFoundDuplicatePackage\"$\n\x12\x44\x65psVersionMissing\x12\x0e\n\x06source\x18\x01 \x01(\t\"p\n\x15\x44\x65psVersionMissingMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\x1f.proto_types.DepsVersionMissing\"/\n\x17\x44\x65psScrubbedPackageName\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\"z\n\x1a\x44\x65psScrubbedPackageNameMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.DepsScrubbedPackageName\"*\n\x1bRunningOperationCaughtError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"\x82\x01\n\x1eRunningOperationCaughtErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.RunningOperationCaughtError\"\x11\n\x0f\x43ompileComplete\"j\n\x12\x43ompileCompleteMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.CompileComplete\"\x18\n\x16\x46reshnessCheckComplete\"x\n\x19\x46reshnessCheckCompleteMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.FreshnessCheckComplete\"\x1c\n\nSeedHeader\x12\x0e\n\x06header\x18\x01 \x01(\t\"`\n\rSeedHeaderMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.SeedHeader\"]\n\x12SQLRunnerException\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x02 \x01(\t\x12(\n\tnode_info\x18\x03 \x01(\x0b\x32\x15.proto_types.NodeInfo\"p\n\x15SQLRunnerExceptionMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.SQLRunnerException\"\xa8\x01\n\rLogTestResult\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\x12\n\nnum_models\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\x12\x14\n\x0cnum_failures\x18\x07 \x01(\x05\"f\n\x10LogTestResultMsg\x12(\n\x04info\x18\x01 
\x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.LogTestResult\"k\n\x0cLogStartLine\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\r\n\x05index\x18\x03 \x01(\x05\x12\r\n\x05total\x18\x04 \x01(\x05\"d\n\x0fLogStartLineMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.LogStartLine\"\x95\x01\n\x0eLogModelResult\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\r\n\x05total\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\"h\n\x11LogModelResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.LogModelResult\"\x92\x02\n\x11LogSnapshotResult\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\r\n\x05total\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\x12\x34\n\x03\x63\x66g\x18\x07 \x03(\x0b\x32\'.proto_types.LogSnapshotResult.CfgEntry\x12\x16\n\x0eresult_message\x18\x08 \x01(\t\x1a*\n\x08\x43\x66gEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"n\n\x14LogSnapshotResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.LogSnapshotResult\"\xb9\x01\n\rLogSeedResult\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0e\n\x06status\x18\x02 \x01(\t\x12\x16\n\x0eresult_message\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\r\n\x05total\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\x12\x0e\n\x06schema\x18\x07 
\x01(\t\x12\x10\n\x08relation\x18\x08 \x01(\t\"f\n\x10LogSeedResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.LogSeedResult\"\xad\x01\n\x12LogFreshnessResult\x12\x0e\n\x06status\x18\x01 \x01(\t\x12(\n\tnode_info\x18\x02 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\r\n\x05index\x18\x03 \x01(\x05\x12\r\n\x05total\x18\x04 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x05 \x01(\x02\x12\x13\n\x0bsource_name\x18\x06 \x01(\t\x12\x12\n\ntable_name\x18\x07 \x01(\t\"p\n\x15LogFreshnessResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.LogFreshnessResult\"\x98\x01\n\x11LogNodeNoOpResult\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\r\n\x05total\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\"n\n\x14LogNodeNoOpResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.LogNodeNoOpResult\"\"\n\rLogCancelLine\x12\x11\n\tconn_name\x18\x01 \x01(\t\"f\n\x10LogCancelLineMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.LogCancelLine\"\x1f\n\x0f\x44\x65\x66\x61ultSelector\x12\x0c\n\x04name\x18\x01 \x01(\t\"j\n\x12\x44\x65\x66\x61ultSelectorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.DefaultSelector\"5\n\tNodeStart\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\"^\n\x0cNodeStartMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.NodeStart\"g\n\x0cNodeFinished\x12(\n\tnode_info\x18\x01 
\x01(\x0b\x32\x15.proto_types.NodeInfo\x12-\n\nrun_result\x18\x02 \x01(\x0b\x32\x19.proto_types.RunResultMsg\"d\n\x0fNodeFinishedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.NodeFinished\"+\n\x1bQueryCancelationUnsupported\x12\x0c\n\x04type\x18\x01 \x01(\t\"\x82\x01\n\x1eQueryCancelationUnsupportedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.QueryCancelationUnsupported\"O\n\x0f\x43oncurrencyLine\x12\x13\n\x0bnum_threads\x18\x01 \x01(\x05\x12\x13\n\x0btarget_name\x18\x02 \x01(\t\x12\x12\n\nnode_count\x18\x03 \x01(\x05\"j\n\x12\x43oncurrencyLineMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.ConcurrencyLine\"E\n\x19WritingInjectedSQLForNode\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\"~\n\x1cWritingInjectedSQLForNodeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.WritingInjectedSQLForNode\"9\n\rNodeCompiling\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\"f\n\x10NodeCompilingMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NodeCompiling\"9\n\rNodeExecuting\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\"f\n\x10NodeExecutingMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NodeExecuting\"m\n\x10LogHookStartLine\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x11\n\tstatement\x18\x02 \x01(\t\x12\r\n\x05index\x18\x03 \x01(\x05\x12\r\n\x05total\x18\x04 \x01(\x05\"l\n\x13LogHookStartLineMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\x1d.proto_types.LogHookStartLine\"\x93\x01\n\x0eLogHookEndLine\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x11\n\tstatement\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\r\n\x05total\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\"h\n\x11LogHookEndLineMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.LogHookEndLine\"\x93\x01\n\x0fSkippingDetails\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x15\n\rresource_type\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\x12\x11\n\tnode_name\x18\x04 \x01(\t\x12\r\n\x05index\x18\x05 \x01(\x05\x12\r\n\x05total\x18\x06 \x01(\x05\"j\n\x12SkippingDetailsMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.SkippingDetails\"\r\n\x0bNothingToDo\"b\n\x0eNothingToDoMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.NothingToDo\",\n\x1dRunningOperationUncaughtError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"\x86\x01\n RunningOperationUncaughtErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x38\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32*.proto_types.RunningOperationUncaughtError\"\x93\x01\n\x0c\x45ndRunResult\x12*\n\x07results\x18\x01 \x03(\x0b\x32\x19.proto_types.RunResultMsg\x12\x14\n\x0c\x65lapsed_time\x18\x02 \x01(\x02\x12\x30\n\x0cgenerated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07success\x18\x04 \x01(\x08\"d\n\x0f\x45ndRunResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.EndRunResult\"\x11\n\x0fNoNodesSelected\"j\n\x12NoNodesSelectedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\x1c.proto_types.NoNodesSelected\"w\n\x10\x43ommandCompleted\x12\x0f\n\x07\x63ommand\x18\x01 \x01(\t\x12\x0f\n\x07success\x18\x02 \x01(\x08\x12\x30\n\x0c\x63ompleted_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07\x65lapsed\x18\x04 \x01(\x02\"l\n\x13\x43ommandCompletedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.CommandCompleted\"k\n\x08ShowNode\x12\x11\n\tnode_name\x18\x01 \x01(\t\x12\x0f\n\x07preview\x18\x02 \x01(\t\x12\x11\n\tis_inline\x18\x03 \x01(\x08\x12\x15\n\routput_format\x18\x04 \x01(\t\x12\x11\n\tunique_id\x18\x05 \x01(\t\"\\\n\x0bShowNodeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.ShowNode\"p\n\x0c\x43ompiledNode\x12\x11\n\tnode_name\x18\x01 \x01(\t\x12\x10\n\x08\x63ompiled\x18\x02 \x01(\t\x12\x11\n\tis_inline\x18\x03 \x01(\x08\x12\x15\n\routput_format\x18\x04 \x01(\t\x12\x11\n\tunique_id\x18\x05 \x01(\t\"d\n\x0f\x43ompiledNodeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.CompiledNode\"b\n\x17\x43\x61tchableExceptionOnRun\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0b\n\x03\x65xc\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"z\n\x1a\x43\x61tchableExceptionOnRunMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.CatchableExceptionOnRun\"_\n\x12InternalErrorOnRun\x12\x12\n\nbuild_path\x18\x01 \x01(\t\x12\x0b\n\x03\x65xc\x18\x02 \x01(\t\x12(\n\tnode_info\x18\x03 \x01(\x0b\x32\x15.proto_types.NodeInfo\"p\n\x15InternalErrorOnRunMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.InternalErrorOnRun\"u\n\x15GenericExceptionOnRun\x12\x12\n\nbuild_path\x18\x01 
\x01(\t\x12\x11\n\tunique_id\x18\x02 \x01(\t\x12\x0b\n\x03\x65xc\x18\x03 \x01(\t\x12(\n\tnode_info\x18\x04 \x01(\x0b\x32\x15.proto_types.NodeInfo\"v\n\x18GenericExceptionOnRunMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.GenericExceptionOnRun\"N\n\x1aNodeConnectionReleaseError\x12\x11\n\tnode_name\x18\x01 \x01(\t\x12\x0b\n\x03\x65xc\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"\x80\x01\n\x1dNodeConnectionReleaseErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.NodeConnectionReleaseError\"\x1f\n\nFoundStats\x12\x11\n\tstat_line\x18\x01 \x01(\t\"`\n\rFoundStatsMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.FoundStats\"\x17\n\x15MainKeyboardInterrupt\"v\n\x18MainKeyboardInterruptMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.MainKeyboardInterrupt\"#\n\x14MainEncounteredError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"t\n\x17MainEncounteredErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.MainEncounteredError\"%\n\x0eMainStackTrace\x12\x13\n\x0bstack_trace\x18\x01 \x01(\t\"h\n\x11MainStackTraceMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.MainStackTrace\"p\n\x13TimingInfoCollected\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12/\n\x0btiming_info\x18\x02 \x01(\x0b\x32\x1a.proto_types.TimingInfoMsg\"r\n\x16TimingInfoCollectedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.TimingInfoCollected\"&\n\x12LogDebugStackTrace\x12\x10\n\x08\x65xc_info\x18\x01 
\x01(\t\"p\n\x15LogDebugStackTraceMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.LogDebugStackTrace\"\x1e\n\x0e\x43heckCleanPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"h\n\x11\x43heckCleanPathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CheckCleanPath\" \n\x10\x43onfirmCleanPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"l\n\x13\x43onfirmCleanPathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConfirmCleanPath\"\"\n\x12ProtectedCleanPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"p\n\x15ProtectedCleanPathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.ProtectedCleanPath\"\x14\n\x12\x46inishedCleanPaths\"p\n\x15\x46inishedCleanPathsMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.FinishedCleanPaths\"5\n\x0bOpenCommand\x12\x10\n\x08open_cmd\x18\x01 \x01(\t\x12\x14\n\x0cprofiles_dir\x18\x02 \x01(\t\"b\n\x0eOpenCommandMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.OpenCommand\"0\n\x0fServingDocsPort\x12\x0f\n\x07\x61\x64\x64ress\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05\"j\n\x12ServingDocsPortMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.ServingDocsPort\"%\n\x15ServingDocsAccessInfo\x12\x0c\n\x04port\x18\x01 \x01(\t\"v\n\x18ServingDocsAccessInfoMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ServingDocsAccessInfo\"\x15\n\x13ServingDocsExitInfo\"r\n\x16ServingDocsExitInfoMsg\x12(\n\x04info\x18\x01 
\x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.ServingDocsExitInfo\"t\n\x10RunResultWarning\x12\x15\n\rresource_type\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x0c\n\x04path\x18\x03 \x01(\t\x12(\n\tnode_info\x18\x04 \x01(\x0b\x32\x15.proto_types.NodeInfo\"l\n\x13RunResultWarningMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.RunResultWarning\"t\n\x10RunResultFailure\x12\x15\n\rresource_type\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x0c\n\x04path\x18\x03 \x01(\t\x12(\n\tnode_info\x18\x04 \x01(\x0b\x32\x15.proto_types.NodeInfo\"l\n\x13RunResultFailureMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.RunResultFailure\"k\n\tStatsLine\x12\x30\n\x05stats\x18\x01 \x03(\x0b\x32!.proto_types.StatsLine.StatsEntry\x1a,\n\nStatsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"^\n\x0cStatsLineMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.StatsLine\"G\n\x0eRunResultError\x12\x0b\n\x03msg\x18\x01 \x01(\t\x12(\n\tnode_info\x18\x02 \x01(\x0b\x32\x15.proto_types.NodeInfo\"h\n\x11RunResultErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.RunResultError\"S\n\x17RunResultErrorNoMessage\x12\x0e\n\x06status\x18\x01 \x01(\t\x12(\n\tnode_info\x18\x02 \x01(\x0b\x32\x15.proto_types.NodeInfo\"z\n\x1aRunResultErrorNoMessageMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.RunResultErrorNoMessage\"I\n\x0fSQLCompiledPath\x12\x0c\n\x04path\x18\x01 \x01(\t\x12(\n\tnode_info\x18\x02 \x01(\x0b\x32\x15.proto_types.NodeInfo\"j\n\x12SQLCompiledPathMsg\x12(\n\x04info\x18\x01 
\x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.SQLCompiledPath\"W\n\x14\x43heckNodeTestFailure\x12\x15\n\rrelation_name\x18\x01 \x01(\t\x12(\n\tnode_info\x18\x02 \x01(\x0b\x32\x15.proto_types.NodeInfo\"t\n\x17\x43heckNodeTestFailureMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.CheckNodeTestFailure\"W\n\x0f\x45ndOfRunSummary\x12\x12\n\nnum_errors\x18\x01 \x01(\x05\x12\x14\n\x0cnum_warnings\x18\x02 \x01(\x05\x12\x1a\n\x12keyboard_interrupt\x18\x03 \x01(\x08\"j\n\x12\x45ndOfRunSummaryMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.EndOfRunSummary\"U\n\x13LogSkipBecauseError\x12\x0e\n\x06schema\x18\x01 \x01(\t\x12\x10\n\x08relation\x18\x02 \x01(\t\x12\r\n\x05index\x18\x03 \x01(\x05\x12\r\n\x05total\x18\x04 \x01(\x05\"r\n\x16LogSkipBecauseErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.LogSkipBecauseError\"\x14\n\x12\x45nsureGitInstalled\"p\n\x15\x45nsureGitInstalledMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.EnsureGitInstalled\"\x1a\n\x18\x44\x65psCreatingLocalSymlink\"|\n\x1b\x44\x65psCreatingLocalSymlinkMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DepsCreatingLocalSymlink\"\x19\n\x17\x44\x65psSymlinkNotAvailable\"z\n\x1a\x44\x65psSymlinkNotAvailableMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.DepsSymlinkNotAvailable\"\x11\n\x0f\x44isableTracking\"j\n\x12\x44isableTrackingMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\x1c.proto_types.DisableTracking\"\x1e\n\x0cSendingEvent\x12\x0e\n\x06kwargs\x18\x01 \x01(\t\"d\n\x0fSendingEventMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.SendingEvent\"\x12\n\x10SendEventFailure\"l\n\x13SendEventFailureMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.SendEventFailure\"\r\n\x0b\x46lushEvents\"b\n\x0e\x46lushEventsMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.FlushEvents\"\x14\n\x12\x46lushEventsFailure\"p\n\x15\x46lushEventsFailureMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.FlushEventsFailure\"-\n\x19TrackingInitializeFailure\x12\x10\n\x08\x65xc_info\x18\x01 \x01(\t\"~\n\x1cTrackingInitializeFailureMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.TrackingInitializeFailure\"P\n\x17RunResultWarningMessage\x12\x0b\n\x03msg\x18\x01 \x01(\t\x12(\n\tnode_info\x18\x02 \x01(\x0b\x32\x15.proto_types.NodeInfo\"z\n\x1aRunResultWarningMessageMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.RunResultWarningMessage\"\x1a\n\x0b\x44\x65\x62ugCmdOut\x12\x0b\n\x03msg\x18\x01 \x01(\t\"b\n\x0e\x44\x65\x62ugCmdOutMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.DebugCmdOut\"\x1d\n\x0e\x44\x65\x62ugCmdResult\x12\x0b\n\x03msg\x18\x01 \x01(\t\"h\n\x11\x44\x65\x62ugCmdResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.DebugCmdResult\"\x19\n\nListCmdOut\x12\x0b\n\x03msg\x18\x01 
\x01(\t\"`\n\rListCmdOutMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.ListCmdOut\"\xec\x01\n\x0eResourceReport\x12\x14\n\x0c\x63ommand_name\x18\x02 \x01(\t\x12\x17\n\x0f\x63ommand_success\x18\x03 \x01(\x08\x12\x1f\n\x17\x63ommand_wall_clock_time\x18\x04 \x01(\x02\x12\x19\n\x11process_user_time\x18\x05 \x01(\x02\x12\x1b\n\x13process_kernel_time\x18\x06 \x01(\x02\x12\x1b\n\x13process_mem_max_rss\x18\x07 \x01(\x03\x12\x19\n\x11process_in_blocks\x18\x08 \x01(\x03\x12\x1a\n\x12process_out_blocks\x18\t \x01(\x03\"h\n\x11ResourceReportMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.ResourceReportb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -175,580 +175,588 @@ _globals['_PROJECTFLAGSMOVEDDEPRECATION']._serialized_end=6576 _globals['_PROJECTFLAGSMOVEDDEPRECATIONMSG']._serialized_start=6579 _globals['_PROJECTFLAGSMOVEDDEPRECATIONMSG']._serialized_end=6711 - _globals['_SPACESINMODELNAMEDEPRECATION']._serialized_start=6713 - _globals['_SPACESINMODELNAMEDEPRECATION']._serialized_end=6801 - _globals['_SPACESINMODELNAMEDEPRECATIONMSG']._serialized_start=6804 - _globals['_SPACESINMODELNAMEDEPRECATIONMSG']._serialized_end=6936 - _globals['_TOTALMODELNAMESWITHSPACESDEPRECATION']._serialized_start=6938 - _globals['_TOTALMODELNAMESWITHSPACESDEPRECATION']._serialized_end=7045 - _globals['_TOTALMODELNAMESWITHSPACESDEPRECATIONMSG']._serialized_start=7048 - _globals['_TOTALMODELNAMESWITHSPACESDEPRECATIONMSG']._serialized_end=7196 - _globals['_DEPRECATEDMODEL']._serialized_start=7198 - _globals['_DEPRECATEDMODEL']._serialized_end=7284 - _globals['_DEPRECATEDMODELMSG']._serialized_start=7286 - _globals['_DEPRECATEDMODELMSG']._serialized_end=7392 - _globals['_INPUTFILEDIFFERROR']._serialized_start=7394 - _globals['_INPUTFILEDIFFERROR']._serialized_end=7449 - 
_globals['_INPUTFILEDIFFERRORMSG']._serialized_start=7451 - _globals['_INPUTFILEDIFFERRORMSG']._serialized_end=7563 - _globals['_INVALIDVALUEFORFIELD']._serialized_start=7565 - _globals['_INVALIDVALUEFORFIELD']._serialized_end=7628 - _globals['_INVALIDVALUEFORFIELDMSG']._serialized_start=7630 - _globals['_INVALIDVALUEFORFIELDMSG']._serialized_end=7746 - _globals['_VALIDATIONWARNING']._serialized_start=7748 - _globals['_VALIDATIONWARNING']._serialized_end=7829 - _globals['_VALIDATIONWARNINGMSG']._serialized_start=7831 - _globals['_VALIDATIONWARNINGMSG']._serialized_end=7941 - _globals['_PARSEPERFINFOPATH']._serialized_start=7943 - _globals['_PARSEPERFINFOPATH']._serialized_end=7976 - _globals['_PARSEPERFINFOPATHMSG']._serialized_start=7978 - _globals['_PARSEPERFINFOPATHMSG']._serialized_end=8088 - _globals['_PARTIALPARSINGERRORPROCESSINGFILE']._serialized_start=8090 - _globals['_PARTIALPARSINGERRORPROCESSINGFILE']._serialized_end=8139 - _globals['_PARTIALPARSINGERRORPROCESSINGFILEMSG']._serialized_start=8142 - _globals['_PARTIALPARSINGERRORPROCESSINGFILEMSG']._serialized_end=8284 - _globals['_PARTIALPARSINGERROR']._serialized_start=8287 - _globals['_PARTIALPARSINGERROR']._serialized_end=8421 - _globals['_PARTIALPARSINGERROR_EXCINFOENTRY']._serialized_start=8375 - _globals['_PARTIALPARSINGERROR_EXCINFOENTRY']._serialized_end=8421 - _globals['_PARTIALPARSINGERRORMSG']._serialized_start=8423 - _globals['_PARTIALPARSINGERRORMSG']._serialized_end=8537 - _globals['_PARTIALPARSINGSKIPPARSING']._serialized_start=8539 - _globals['_PARTIALPARSINGSKIPPARSING']._serialized_end=8566 - _globals['_PARTIALPARSINGSKIPPARSINGMSG']._serialized_start=8568 - _globals['_PARTIALPARSINGSKIPPARSINGMSG']._serialized_end=8694 - _globals['_UNABLETOPARTIALPARSE']._serialized_start=8696 - _globals['_UNABLETOPARTIALPARSE']._serialized_end=8734 - _globals['_UNABLETOPARTIALPARSEMSG']._serialized_start=8736 - _globals['_UNABLETOPARTIALPARSEMSG']._serialized_end=8852 - 
_globals['_STATECHECKVARSHASH']._serialized_start=8854 - _globals['_STATECHECKVARSHASH']._serialized_end=8956 - _globals['_STATECHECKVARSHASHMSG']._serialized_start=8958 - _globals['_STATECHECKVARSHASHMSG']._serialized_end=9070 - _globals['_PARTIALPARSINGNOTENABLED']._serialized_start=9072 - _globals['_PARTIALPARSINGNOTENABLED']._serialized_end=9098 - _globals['_PARTIALPARSINGNOTENABLEDMSG']._serialized_start=9100 - _globals['_PARTIALPARSINGNOTENABLEDMSG']._serialized_end=9224 - _globals['_PARSEDFILELOADFAILED']._serialized_start=9226 - _globals['_PARSEDFILELOADFAILED']._serialized_end=9293 - _globals['_PARSEDFILELOADFAILEDMSG']._serialized_start=9295 - _globals['_PARSEDFILELOADFAILEDMSG']._serialized_end=9411 - _globals['_PARTIALPARSINGENABLED']._serialized_start=9413 - _globals['_PARTIALPARSINGENABLED']._serialized_end=9485 - _globals['_PARTIALPARSINGENABLEDMSG']._serialized_start=9487 - _globals['_PARTIALPARSINGENABLEDMSG']._serialized_end=9605 - _globals['_PARTIALPARSINGFILE']._serialized_start=9607 - _globals['_PARTIALPARSINGFILE']._serialized_end=9663 - _globals['_PARTIALPARSINGFILEMSG']._serialized_start=9665 - _globals['_PARTIALPARSINGFILEMSG']._serialized_end=9777 - _globals['_INVALIDDISABLEDTARGETINTESTNODE']._serialized_start=9780 - _globals['_INVALIDDISABLEDTARGETINTESTNODE']._serialized_end=9955 - _globals['_INVALIDDISABLEDTARGETINTESTNODEMSG']._serialized_start=9958 - _globals['_INVALIDDISABLEDTARGETINTESTNODEMSG']._serialized_end=10096 - _globals['_UNUSEDRESOURCECONFIGPATH']._serialized_start=10098 - _globals['_UNUSEDRESOURCECONFIGPATH']._serialized_end=10153 - _globals['_UNUSEDRESOURCECONFIGPATHMSG']._serialized_start=10155 - _globals['_UNUSEDRESOURCECONFIGPATHMSG']._serialized_end=10279 - _globals['_SEEDINCREASED']._serialized_start=10281 - _globals['_SEEDINCREASED']._serialized_end=10332 - _globals['_SEEDINCREASEDMSG']._serialized_start=10334 - _globals['_SEEDINCREASEDMSG']._serialized_end=10436 - 
_globals['_SEEDEXCEEDSLIMITSAMEPATH']._serialized_start=10438 - _globals['_SEEDEXCEEDSLIMITSAMEPATH']._serialized_end=10500 - _globals['_SEEDEXCEEDSLIMITSAMEPATHMSG']._serialized_start=10502 - _globals['_SEEDEXCEEDSLIMITSAMEPATHMSG']._serialized_end=10626 - _globals['_SEEDEXCEEDSLIMITANDPATHCHANGED']._serialized_start=10628 - _globals['_SEEDEXCEEDSLIMITANDPATHCHANGED']._serialized_end=10696 - _globals['_SEEDEXCEEDSLIMITANDPATHCHANGEDMSG']._serialized_start=10699 - _globals['_SEEDEXCEEDSLIMITANDPATHCHANGEDMSG']._serialized_end=10835 - _globals['_SEEDEXCEEDSLIMITCHECKSUMCHANGED']._serialized_start=10837 - _globals['_SEEDEXCEEDSLIMITCHECKSUMCHANGED']._serialized_end=10929 - _globals['_SEEDEXCEEDSLIMITCHECKSUMCHANGEDMSG']._serialized_start=10932 - _globals['_SEEDEXCEEDSLIMITCHECKSUMCHANGEDMSG']._serialized_end=11070 - _globals['_UNUSEDTABLES']._serialized_start=11072 - _globals['_UNUSEDTABLES']._serialized_end=11109 - _globals['_UNUSEDTABLESMSG']._serialized_start=11111 - _globals['_UNUSEDTABLESMSG']._serialized_end=11211 - _globals['_WRONGRESOURCESCHEMAFILE']._serialized_start=11214 - _globals['_WRONGRESOURCESCHEMAFILE']._serialized_end=11349 - _globals['_WRONGRESOURCESCHEMAFILEMSG']._serialized_start=11351 - _globals['_WRONGRESOURCESCHEMAFILEMSG']._serialized_end=11473 - _globals['_NONODEFORYAMLKEY']._serialized_start=11475 - _globals['_NONODEFORYAMLKEY']._serialized_end=11550 - _globals['_NONODEFORYAMLKEYMSG']._serialized_start=11552 - _globals['_NONODEFORYAMLKEYMSG']._serialized_end=11660 - _globals['_MACRONOTFOUNDFORPATCH']._serialized_start=11662 - _globals['_MACRONOTFOUNDFORPATCH']._serialized_end=11705 - _globals['_MACRONOTFOUNDFORPATCHMSG']._serialized_start=11707 - _globals['_MACRONOTFOUNDFORPATCHMSG']._serialized_end=11825 - _globals['_NODENOTFOUNDORDISABLED']._serialized_start=11828 - _globals['_NODENOTFOUNDORDISABLED']._serialized_end=12012 - _globals['_NODENOTFOUNDORDISABLEDMSG']._serialized_start=12014 - 
_globals['_NODENOTFOUNDORDISABLEDMSG']._serialized_end=12134 - _globals['_JINJALOGWARNING']._serialized_start=12136 - _globals['_JINJALOGWARNING']._serialized_end=12208 - _globals['_JINJALOGWARNINGMSG']._serialized_start=12210 - _globals['_JINJALOGWARNINGMSG']._serialized_end=12316 - _globals['_JINJALOGINFO']._serialized_start=12318 - _globals['_JINJALOGINFO']._serialized_end=12387 - _globals['_JINJALOGINFOMSG']._serialized_start=12389 - _globals['_JINJALOGINFOMSG']._serialized_end=12489 - _globals['_JINJALOGDEBUG']._serialized_start=12491 - _globals['_JINJALOGDEBUG']._serialized_end=12561 - _globals['_JINJALOGDEBUGMSG']._serialized_start=12563 - _globals['_JINJALOGDEBUGMSG']._serialized_end=12665 - _globals['_UNPINNEDREFNEWVERSIONAVAILABLE']._serialized_start=12668 - _globals['_UNPINNEDREFNEWVERSIONAVAILABLE']._serialized_end=12842 - _globals['_UNPINNEDREFNEWVERSIONAVAILABLEMSG']._serialized_start=12845 - _globals['_UNPINNEDREFNEWVERSIONAVAILABLEMSG']._serialized_end=12981 - _globals['_UPCOMINGREFERENCEDEPRECATION']._serialized_start=12984 - _globals['_UPCOMINGREFERENCEDEPRECATION']._serialized_end=13182 - _globals['_UPCOMINGREFERENCEDEPRECATIONMSG']._serialized_start=13185 - _globals['_UPCOMINGREFERENCEDEPRECATIONMSG']._serialized_end=13317 - _globals['_DEPRECATEDREFERENCE']._serialized_start=13320 - _globals['_DEPRECATEDREFERENCE']._serialized_end=13509 - _globals['_DEPRECATEDREFERENCEMSG']._serialized_start=13511 - _globals['_DEPRECATEDREFERENCEMSG']._serialized_end=13625 - _globals['_UNSUPPORTEDCONSTRAINTMATERIALIZATION']._serialized_start=13627 - _globals['_UNSUPPORTEDCONSTRAINTMATERIALIZATION']._serialized_end=13687 - _globals['_UNSUPPORTEDCONSTRAINTMATERIALIZATIONMSG']._serialized_start=13690 - _globals['_UNSUPPORTEDCONSTRAINTMATERIALIZATIONMSG']._serialized_end=13838 - _globals['_PARSEINLINENODEERROR']._serialized_start=13840 - _globals['_PARSEINLINENODEERROR']._serialized_end=13917 - _globals['_PARSEINLINENODEERRORMSG']._serialized_start=13919 - 
_globals['_PARSEINLINENODEERRORMSG']._serialized_end=14035 - _globals['_SEMANTICVALIDATIONFAILURE']._serialized_start=14037 - _globals['_SEMANTICVALIDATIONFAILURE']._serialized_end=14077 - _globals['_SEMANTICVALIDATIONFAILUREMSG']._serialized_start=14079 - _globals['_SEMANTICVALIDATIONFAILUREMSG']._serialized_end=14205 - _globals['_UNVERSIONEDBREAKINGCHANGE']._serialized_start=14208 - _globals['_UNVERSIONEDBREAKINGCHANGE']._serialized_end=14602 - _globals['_UNVERSIONEDBREAKINGCHANGEMSG']._serialized_start=14604 - _globals['_UNVERSIONEDBREAKINGCHANGEMSG']._serialized_end=14730 - _globals['_WARNSTATETARGETEQUAL']._serialized_start=14732 - _globals['_WARNSTATETARGETEQUAL']._serialized_end=14774 - _globals['_WARNSTATETARGETEQUALMSG']._serialized_start=14776 - _globals['_WARNSTATETARGETEQUALMSG']._serialized_end=14892 - _globals['_FRESHNESSCONFIGPROBLEM']._serialized_start=14894 - _globals['_FRESHNESSCONFIGPROBLEM']._serialized_end=14931 - _globals['_FRESHNESSCONFIGPROBLEMMSG']._serialized_start=14933 - _globals['_FRESHNESSCONFIGPROBLEMMSG']._serialized_end=15053 - _globals['_GITSPARSECHECKOUTSUBDIRECTORY']._serialized_start=15055 - _globals['_GITSPARSECHECKOUTSUBDIRECTORY']._serialized_end=15102 - _globals['_GITSPARSECHECKOUTSUBDIRECTORYMSG']._serialized_start=15105 - _globals['_GITSPARSECHECKOUTSUBDIRECTORYMSG']._serialized_end=15239 - _globals['_GITPROGRESSCHECKOUTREVISION']._serialized_start=15241 - _globals['_GITPROGRESSCHECKOUTREVISION']._serialized_end=15288 - _globals['_GITPROGRESSCHECKOUTREVISIONMSG']._serialized_start=15291 - _globals['_GITPROGRESSCHECKOUTREVISIONMSG']._serialized_end=15421 - _globals['_GITPROGRESSUPDATINGEXISTINGDEPENDENCY']._serialized_start=15423 - _globals['_GITPROGRESSUPDATINGEXISTINGDEPENDENCY']._serialized_end=15475 - _globals['_GITPROGRESSUPDATINGEXISTINGDEPENDENCYMSG']._serialized_start=15478 - _globals['_GITPROGRESSUPDATINGEXISTINGDEPENDENCYMSG']._serialized_end=15628 - 
_globals['_GITPROGRESSPULLINGNEWDEPENDENCY']._serialized_start=15630 - _globals['_GITPROGRESSPULLINGNEWDEPENDENCY']._serialized_end=15676 - _globals['_GITPROGRESSPULLINGNEWDEPENDENCYMSG']._serialized_start=15679 - _globals['_GITPROGRESSPULLINGNEWDEPENDENCYMSG']._serialized_end=15817 - _globals['_GITNOTHINGTODO']._serialized_start=15819 - _globals['_GITNOTHINGTODO']._serialized_end=15848 - _globals['_GITNOTHINGTODOMSG']._serialized_start=15850 - _globals['_GITNOTHINGTODOMSG']._serialized_end=15954 - _globals['_GITPROGRESSUPDATEDCHECKOUTRANGE']._serialized_start=15956 - _globals['_GITPROGRESSUPDATEDCHECKOUTRANGE']._serialized_end=16025 - _globals['_GITPROGRESSUPDATEDCHECKOUTRANGEMSG']._serialized_start=16028 - _globals['_GITPROGRESSUPDATEDCHECKOUTRANGEMSG']._serialized_end=16166 - _globals['_GITPROGRESSCHECKEDOUTAT']._serialized_start=16168 - _globals['_GITPROGRESSCHECKEDOUTAT']._serialized_end=16210 - _globals['_GITPROGRESSCHECKEDOUTATMSG']._serialized_start=16212 - _globals['_GITPROGRESSCHECKEDOUTATMSG']._serialized_end=16334 - _globals['_REGISTRYPROGRESSGETREQUEST']._serialized_start=16336 - _globals['_REGISTRYPROGRESSGETREQUEST']._serialized_end=16377 - _globals['_REGISTRYPROGRESSGETREQUESTMSG']._serialized_start=16380 - _globals['_REGISTRYPROGRESSGETREQUESTMSG']._serialized_end=16508 - _globals['_REGISTRYPROGRESSGETRESPONSE']._serialized_start=16510 - _globals['_REGISTRYPROGRESSGETRESPONSE']._serialized_end=16571 - _globals['_REGISTRYPROGRESSGETRESPONSEMSG']._serialized_start=16574 - _globals['_REGISTRYPROGRESSGETRESPONSEMSG']._serialized_end=16704 - _globals['_SELECTORREPORTINVALIDSELECTOR']._serialized_start=16706 - _globals['_SELECTORREPORTINVALIDSELECTOR']._serialized_end=16801 - _globals['_SELECTORREPORTINVALIDSELECTORMSG']._serialized_start=16804 - _globals['_SELECTORREPORTINVALIDSELECTORMSG']._serialized_end=16938 - _globals['_DEPSNOPACKAGESFOUND']._serialized_start=16940 - _globals['_DEPSNOPACKAGESFOUND']._serialized_end=16961 - 
_globals['_DEPSNOPACKAGESFOUNDMSG']._serialized_start=16963 - _globals['_DEPSNOPACKAGESFOUNDMSG']._serialized_end=17077 - _globals['_DEPSSTARTPACKAGEINSTALL']._serialized_start=17079 - _globals['_DEPSSTARTPACKAGEINSTALL']._serialized_end=17126 - _globals['_DEPSSTARTPACKAGEINSTALLMSG']._serialized_start=17128 - _globals['_DEPSSTARTPACKAGEINSTALLMSG']._serialized_end=17250 - _globals['_DEPSINSTALLINFO']._serialized_start=17252 - _globals['_DEPSINSTALLINFO']._serialized_end=17291 - _globals['_DEPSINSTALLINFOMSG']._serialized_start=17293 - _globals['_DEPSINSTALLINFOMSG']._serialized_end=17399 - _globals['_DEPSUPDATEAVAILABLE']._serialized_start=17401 - _globals['_DEPSUPDATEAVAILABLE']._serialized_end=17446 - _globals['_DEPSUPDATEAVAILABLEMSG']._serialized_start=17448 - _globals['_DEPSUPDATEAVAILABLEMSG']._serialized_end=17562 - _globals['_DEPSUPTODATE']._serialized_start=17564 - _globals['_DEPSUPTODATE']._serialized_end=17578 - _globals['_DEPSUPTODATEMSG']._serialized_start=17580 - _globals['_DEPSUPTODATEMSG']._serialized_end=17680 - _globals['_DEPSLISTSUBDIRECTORY']._serialized_start=17682 - _globals['_DEPSLISTSUBDIRECTORY']._serialized_end=17726 - _globals['_DEPSLISTSUBDIRECTORYMSG']._serialized_start=17728 - _globals['_DEPSLISTSUBDIRECTORYMSG']._serialized_end=17844 - _globals['_DEPSNOTIFYUPDATESAVAILABLE']._serialized_start=17846 - _globals['_DEPSNOTIFYUPDATESAVAILABLE']._serialized_end=17892 - _globals['_DEPSNOTIFYUPDATESAVAILABLEMSG']._serialized_start=17895 - _globals['_DEPSNOTIFYUPDATESAVAILABLEMSG']._serialized_end=18023 - _globals['_REGISTRYINDEXPROGRESSGETREQUEST']._serialized_start=18025 - _globals['_REGISTRYINDEXPROGRESSGETREQUEST']._serialized_end=18071 - _globals['_REGISTRYINDEXPROGRESSGETREQUESTMSG']._serialized_start=18074 - _globals['_REGISTRYINDEXPROGRESSGETREQUESTMSG']._serialized_end=18212 - _globals['_REGISTRYINDEXPROGRESSGETRESPONSE']._serialized_start=18214 - _globals['_REGISTRYINDEXPROGRESSGETRESPONSE']._serialized_end=18280 - 
_globals['_REGISTRYINDEXPROGRESSGETRESPONSEMSG']._serialized_start=18283 - _globals['_REGISTRYINDEXPROGRESSGETRESPONSEMSG']._serialized_end=18423 - _globals['_REGISTRYRESPONSEUNEXPECTEDTYPE']._serialized_start=18425 - _globals['_REGISTRYRESPONSEUNEXPECTEDTYPE']._serialized_end=18475 - _globals['_REGISTRYRESPONSEUNEXPECTEDTYPEMSG']._serialized_start=18478 - _globals['_REGISTRYRESPONSEUNEXPECTEDTYPEMSG']._serialized_end=18614 - _globals['_REGISTRYRESPONSEMISSINGTOPKEYS']._serialized_start=18616 - _globals['_REGISTRYRESPONSEMISSINGTOPKEYS']._serialized_end=18666 - _globals['_REGISTRYRESPONSEMISSINGTOPKEYSMSG']._serialized_start=18669 - _globals['_REGISTRYRESPONSEMISSINGTOPKEYSMSG']._serialized_end=18805 - _globals['_REGISTRYRESPONSEMISSINGNESTEDKEYS']._serialized_start=18807 - _globals['_REGISTRYRESPONSEMISSINGNESTEDKEYS']._serialized_end=18860 - _globals['_REGISTRYRESPONSEMISSINGNESTEDKEYSMSG']._serialized_start=18863 - _globals['_REGISTRYRESPONSEMISSINGNESTEDKEYSMSG']._serialized_end=19005 - _globals['_REGISTRYRESPONSEEXTRANESTEDKEYS']._serialized_start=19007 - _globals['_REGISTRYRESPONSEEXTRANESTEDKEYS']._serialized_end=19058 - _globals['_REGISTRYRESPONSEEXTRANESTEDKEYSMSG']._serialized_start=19061 - _globals['_REGISTRYRESPONSEEXTRANESTEDKEYSMSG']._serialized_end=19199 - _globals['_DEPSSETDOWNLOADDIRECTORY']._serialized_start=19201 - _globals['_DEPSSETDOWNLOADDIRECTORY']._serialized_end=19241 - _globals['_DEPSSETDOWNLOADDIRECTORYMSG']._serialized_start=19243 - _globals['_DEPSSETDOWNLOADDIRECTORYMSG']._serialized_end=19367 - _globals['_DEPSUNPINNED']._serialized_start=19369 - _globals['_DEPSUNPINNED']._serialized_end=19414 - _globals['_DEPSUNPINNEDMSG']._serialized_start=19416 - _globals['_DEPSUNPINNEDMSG']._serialized_end=19516 - _globals['_NONODESFORSELECTIONCRITERIA']._serialized_start=19518 - _globals['_NONODESFORSELECTIONCRITERIA']._serialized_end=19565 - _globals['_NONODESFORSELECTIONCRITERIAMSG']._serialized_start=19568 - 
_globals['_NONODESFORSELECTIONCRITERIAMSG']._serialized_end=19698 - _globals['_DEPSLOCKUPDATING']._serialized_start=19700 - _globals['_DEPSLOCKUPDATING']._serialized_end=19741 - _globals['_DEPSLOCKUPDATINGMSG']._serialized_start=19743 - _globals['_DEPSLOCKUPDATINGMSG']._serialized_end=19851 - _globals['_DEPSADDPACKAGE']._serialized_start=19853 - _globals['_DEPSADDPACKAGE']._serialized_end=19935 - _globals['_DEPSADDPACKAGEMSG']._serialized_start=19937 - _globals['_DEPSADDPACKAGEMSG']._serialized_end=20041 - _globals['_DEPSFOUNDDUPLICATEPACKAGE']._serialized_start=20044 - _globals['_DEPSFOUNDDUPLICATEPACKAGE']._serialized_end=20211 - _globals['_DEPSFOUNDDUPLICATEPACKAGE_REMOVEDPACKAGEENTRY']._serialized_start=20158 - _globals['_DEPSFOUNDDUPLICATEPACKAGE_REMOVEDPACKAGEENTRY']._serialized_end=20211 - _globals['_DEPSFOUNDDUPLICATEPACKAGEMSG']._serialized_start=20213 - _globals['_DEPSFOUNDDUPLICATEPACKAGEMSG']._serialized_end=20339 - _globals['_DEPSVERSIONMISSING']._serialized_start=20341 - _globals['_DEPSVERSIONMISSING']._serialized_end=20377 - _globals['_DEPSVERSIONMISSINGMSG']._serialized_start=20379 - _globals['_DEPSVERSIONMISSINGMSG']._serialized_end=20491 - _globals['_DEPSSCRUBBEDPACKAGENAME']._serialized_start=20493 - _globals['_DEPSSCRUBBEDPACKAGENAME']._serialized_end=20540 - _globals['_DEPSSCRUBBEDPACKAGENAMEMSG']._serialized_start=20542 - _globals['_DEPSSCRUBBEDPACKAGENAMEMSG']._serialized_end=20664 - _globals['_RUNNINGOPERATIONCAUGHTERROR']._serialized_start=20666 - _globals['_RUNNINGOPERATIONCAUGHTERROR']._serialized_end=20708 - _globals['_RUNNINGOPERATIONCAUGHTERRORMSG']._serialized_start=20711 - _globals['_RUNNINGOPERATIONCAUGHTERRORMSG']._serialized_end=20841 - _globals['_COMPILECOMPLETE']._serialized_start=20843 - _globals['_COMPILECOMPLETE']._serialized_end=20860 - _globals['_COMPILECOMPLETEMSG']._serialized_start=20862 - _globals['_COMPILECOMPLETEMSG']._serialized_end=20968 - _globals['_FRESHNESSCHECKCOMPLETE']._serialized_start=20970 - 
_globals['_FRESHNESSCHECKCOMPLETE']._serialized_end=20994 - _globals['_FRESHNESSCHECKCOMPLETEMSG']._serialized_start=20996 - _globals['_FRESHNESSCHECKCOMPLETEMSG']._serialized_end=21116 - _globals['_SEEDHEADER']._serialized_start=21118 - _globals['_SEEDHEADER']._serialized_end=21146 - _globals['_SEEDHEADERMSG']._serialized_start=21148 - _globals['_SEEDHEADERMSG']._serialized_end=21244 - _globals['_SQLRUNNEREXCEPTION']._serialized_start=21246 - _globals['_SQLRUNNEREXCEPTION']._serialized_end=21339 - _globals['_SQLRUNNEREXCEPTIONMSG']._serialized_start=21341 - _globals['_SQLRUNNEREXCEPTIONMSG']._serialized_end=21453 - _globals['_LOGTESTRESULT']._serialized_start=21456 - _globals['_LOGTESTRESULT']._serialized_end=21624 - _globals['_LOGTESTRESULTMSG']._serialized_start=21626 - _globals['_LOGTESTRESULTMSG']._serialized_end=21728 - _globals['_LOGSTARTLINE']._serialized_start=21730 - _globals['_LOGSTARTLINE']._serialized_end=21837 - _globals['_LOGSTARTLINEMSG']._serialized_start=21839 - _globals['_LOGSTARTLINEMSG']._serialized_end=21939 - _globals['_LOGMODELRESULT']._serialized_start=21942 - _globals['_LOGMODELRESULT']._serialized_end=22091 - _globals['_LOGMODELRESULTMSG']._serialized_start=22093 - _globals['_LOGMODELRESULTMSG']._serialized_end=22197 - _globals['_LOGSNAPSHOTRESULT']._serialized_start=22200 - _globals['_LOGSNAPSHOTRESULT']._serialized_end=22474 - _globals['_LOGSNAPSHOTRESULT_CFGENTRY']._serialized_start=22432 - _globals['_LOGSNAPSHOTRESULT_CFGENTRY']._serialized_end=22474 - _globals['_LOGSNAPSHOTRESULTMSG']._serialized_start=22476 - _globals['_LOGSNAPSHOTRESULTMSG']._serialized_end=22586 - _globals['_LOGSEEDRESULT']._serialized_start=22589 - _globals['_LOGSEEDRESULT']._serialized_end=22774 - _globals['_LOGSEEDRESULTMSG']._serialized_start=22776 - _globals['_LOGSEEDRESULTMSG']._serialized_end=22878 - _globals['_LOGFRESHNESSRESULT']._serialized_start=22881 - _globals['_LOGFRESHNESSRESULT']._serialized_end=23054 - 
_globals['_LOGFRESHNESSRESULTMSG']._serialized_start=23056 - _globals['_LOGFRESHNESSRESULTMSG']._serialized_end=23168 - _globals['_LOGNODENOOPRESULT']._serialized_start=23171 - _globals['_LOGNODENOOPRESULT']._serialized_end=23323 - _globals['_LOGNODENOOPRESULTMSG']._serialized_start=23325 - _globals['_LOGNODENOOPRESULTMSG']._serialized_end=23435 - _globals['_LOGCANCELLINE']._serialized_start=23437 - _globals['_LOGCANCELLINE']._serialized_end=23471 - _globals['_LOGCANCELLINEMSG']._serialized_start=23473 - _globals['_LOGCANCELLINEMSG']._serialized_end=23575 - _globals['_DEFAULTSELECTOR']._serialized_start=23577 - _globals['_DEFAULTSELECTOR']._serialized_end=23608 - _globals['_DEFAULTSELECTORMSG']._serialized_start=23610 - _globals['_DEFAULTSELECTORMSG']._serialized_end=23716 - _globals['_NODESTART']._serialized_start=23718 - _globals['_NODESTART']._serialized_end=23771 - _globals['_NODESTARTMSG']._serialized_start=23773 - _globals['_NODESTARTMSG']._serialized_end=23867 - _globals['_NODEFINISHED']._serialized_start=23869 - _globals['_NODEFINISHED']._serialized_end=23972 - _globals['_NODEFINISHEDMSG']._serialized_start=23974 - _globals['_NODEFINISHEDMSG']._serialized_end=24074 - _globals['_QUERYCANCELATIONUNSUPPORTED']._serialized_start=24076 - _globals['_QUERYCANCELATIONUNSUPPORTED']._serialized_end=24119 - _globals['_QUERYCANCELATIONUNSUPPORTEDMSG']._serialized_start=24122 - _globals['_QUERYCANCELATIONUNSUPPORTEDMSG']._serialized_end=24252 - _globals['_CONCURRENCYLINE']._serialized_start=24254 - _globals['_CONCURRENCYLINE']._serialized_end=24333 - _globals['_CONCURRENCYLINEMSG']._serialized_start=24335 - _globals['_CONCURRENCYLINEMSG']._serialized_end=24441 - _globals['_WRITINGINJECTEDSQLFORNODE']._serialized_start=24443 - _globals['_WRITINGINJECTEDSQLFORNODE']._serialized_end=24512 - _globals['_WRITINGINJECTEDSQLFORNODEMSG']._serialized_start=24514 - _globals['_WRITINGINJECTEDSQLFORNODEMSG']._serialized_end=24640 - _globals['_NODECOMPILING']._serialized_start=24642 
- _globals['_NODECOMPILING']._serialized_end=24699 - _globals['_NODECOMPILINGMSG']._serialized_start=24701 - _globals['_NODECOMPILINGMSG']._serialized_end=24803 - _globals['_NODEEXECUTING']._serialized_start=24805 - _globals['_NODEEXECUTING']._serialized_end=24862 - _globals['_NODEEXECUTINGMSG']._serialized_start=24864 - _globals['_NODEEXECUTINGMSG']._serialized_end=24966 - _globals['_LOGHOOKSTARTLINE']._serialized_start=24968 - _globals['_LOGHOOKSTARTLINE']._serialized_end=25077 - _globals['_LOGHOOKSTARTLINEMSG']._serialized_start=25079 - _globals['_LOGHOOKSTARTLINEMSG']._serialized_end=25187 - _globals['_LOGHOOKENDLINE']._serialized_start=25190 - _globals['_LOGHOOKENDLINE']._serialized_end=25337 - _globals['_LOGHOOKENDLINEMSG']._serialized_start=25339 - _globals['_LOGHOOKENDLINEMSG']._serialized_end=25443 - _globals['_SKIPPINGDETAILS']._serialized_start=25446 - _globals['_SKIPPINGDETAILS']._serialized_end=25593 - _globals['_SKIPPINGDETAILSMSG']._serialized_start=25595 - _globals['_SKIPPINGDETAILSMSG']._serialized_end=25701 - _globals['_NOTHINGTODO']._serialized_start=25703 - _globals['_NOTHINGTODO']._serialized_end=25716 - _globals['_NOTHINGTODOMSG']._serialized_start=25718 - _globals['_NOTHINGTODOMSG']._serialized_end=25816 - _globals['_RUNNINGOPERATIONUNCAUGHTERROR']._serialized_start=25818 - _globals['_RUNNINGOPERATIONUNCAUGHTERROR']._serialized_end=25862 - _globals['_RUNNINGOPERATIONUNCAUGHTERRORMSG']._serialized_start=25865 - _globals['_RUNNINGOPERATIONUNCAUGHTERRORMSG']._serialized_end=25999 - _globals['_ENDRUNRESULT']._serialized_start=26002 - _globals['_ENDRUNRESULT']._serialized_end=26149 - _globals['_ENDRUNRESULTMSG']._serialized_start=26151 - _globals['_ENDRUNRESULTMSG']._serialized_end=26251 - _globals['_NONODESSELECTED']._serialized_start=26253 - _globals['_NONODESSELECTED']._serialized_end=26270 - _globals['_NONODESSELECTEDMSG']._serialized_start=26272 - _globals['_NONODESSELECTEDMSG']._serialized_end=26378 - 
_globals['_COMMANDCOMPLETED']._serialized_start=26380 - _globals['_COMMANDCOMPLETED']._serialized_end=26499 - _globals['_COMMANDCOMPLETEDMSG']._serialized_start=26501 - _globals['_COMMANDCOMPLETEDMSG']._serialized_end=26609 - _globals['_SHOWNODE']._serialized_start=26611 - _globals['_SHOWNODE']._serialized_end=26718 - _globals['_SHOWNODEMSG']._serialized_start=26720 - _globals['_SHOWNODEMSG']._serialized_end=26812 - _globals['_COMPILEDNODE']._serialized_start=26814 - _globals['_COMPILEDNODE']._serialized_end=26926 - _globals['_COMPILEDNODEMSG']._serialized_start=26928 - _globals['_COMPILEDNODEMSG']._serialized_end=27028 - _globals['_CATCHABLEEXCEPTIONONRUN']._serialized_start=27030 - _globals['_CATCHABLEEXCEPTIONONRUN']._serialized_end=27128 - _globals['_CATCHABLEEXCEPTIONONRUNMSG']._serialized_start=27130 - _globals['_CATCHABLEEXCEPTIONONRUNMSG']._serialized_end=27252 - _globals['_INTERNALERRORONRUN']._serialized_start=27254 - _globals['_INTERNALERRORONRUN']._serialized_end=27349 - _globals['_INTERNALERRORONRUNMSG']._serialized_start=27351 - _globals['_INTERNALERRORONRUNMSG']._serialized_end=27463 - _globals['_GENERICEXCEPTIONONRUN']._serialized_start=27465 - _globals['_GENERICEXCEPTIONONRUN']._serialized_end=27582 - _globals['_GENERICEXCEPTIONONRUNMSG']._serialized_start=27584 - _globals['_GENERICEXCEPTIONONRUNMSG']._serialized_end=27702 - _globals['_NODECONNECTIONRELEASEERROR']._serialized_start=27704 - _globals['_NODECONNECTIONRELEASEERROR']._serialized_end=27782 - _globals['_NODECONNECTIONRELEASEERRORMSG']._serialized_start=27785 - _globals['_NODECONNECTIONRELEASEERRORMSG']._serialized_end=27913 - _globals['_FOUNDSTATS']._serialized_start=27915 - _globals['_FOUNDSTATS']._serialized_end=27946 - _globals['_FOUNDSTATSMSG']._serialized_start=27948 - _globals['_FOUNDSTATSMSG']._serialized_end=28044 - _globals['_MAINKEYBOARDINTERRUPT']._serialized_start=28046 - _globals['_MAINKEYBOARDINTERRUPT']._serialized_end=28069 - 
_globals['_MAINKEYBOARDINTERRUPTMSG']._serialized_start=28071 - _globals['_MAINKEYBOARDINTERRUPTMSG']._serialized_end=28189 - _globals['_MAINENCOUNTEREDERROR']._serialized_start=28191 - _globals['_MAINENCOUNTEREDERROR']._serialized_end=28226 - _globals['_MAINENCOUNTEREDERRORMSG']._serialized_start=28228 - _globals['_MAINENCOUNTEREDERRORMSG']._serialized_end=28344 - _globals['_MAINSTACKTRACE']._serialized_start=28346 - _globals['_MAINSTACKTRACE']._serialized_end=28383 - _globals['_MAINSTACKTRACEMSG']._serialized_start=28385 - _globals['_MAINSTACKTRACEMSG']._serialized_end=28489 - _globals['_TIMINGINFOCOLLECTED']._serialized_start=28491 - _globals['_TIMINGINFOCOLLECTED']._serialized_end=28603 - _globals['_TIMINGINFOCOLLECTEDMSG']._serialized_start=28605 - _globals['_TIMINGINFOCOLLECTEDMSG']._serialized_end=28719 - _globals['_LOGDEBUGSTACKTRACE']._serialized_start=28721 - _globals['_LOGDEBUGSTACKTRACE']._serialized_end=28759 - _globals['_LOGDEBUGSTACKTRACEMSG']._serialized_start=28761 - _globals['_LOGDEBUGSTACKTRACEMSG']._serialized_end=28873 - _globals['_CHECKCLEANPATH']._serialized_start=28875 - _globals['_CHECKCLEANPATH']._serialized_end=28905 - _globals['_CHECKCLEANPATHMSG']._serialized_start=28907 - _globals['_CHECKCLEANPATHMSG']._serialized_end=29011 - _globals['_CONFIRMCLEANPATH']._serialized_start=29013 - _globals['_CONFIRMCLEANPATH']._serialized_end=29045 - _globals['_CONFIRMCLEANPATHMSG']._serialized_start=29047 - _globals['_CONFIRMCLEANPATHMSG']._serialized_end=29155 - _globals['_PROTECTEDCLEANPATH']._serialized_start=29157 - _globals['_PROTECTEDCLEANPATH']._serialized_end=29191 - _globals['_PROTECTEDCLEANPATHMSG']._serialized_start=29193 - _globals['_PROTECTEDCLEANPATHMSG']._serialized_end=29305 - _globals['_FINISHEDCLEANPATHS']._serialized_start=29307 - _globals['_FINISHEDCLEANPATHS']._serialized_end=29327 - _globals['_FINISHEDCLEANPATHSMSG']._serialized_start=29329 - _globals['_FINISHEDCLEANPATHSMSG']._serialized_end=29441 - 
_globals['_OPENCOMMAND']._serialized_start=29443 - _globals['_OPENCOMMAND']._serialized_end=29496 - _globals['_OPENCOMMANDMSG']._serialized_start=29498 - _globals['_OPENCOMMANDMSG']._serialized_end=29596 - _globals['_SERVINGDOCSPORT']._serialized_start=29598 - _globals['_SERVINGDOCSPORT']._serialized_end=29646 - _globals['_SERVINGDOCSPORTMSG']._serialized_start=29648 - _globals['_SERVINGDOCSPORTMSG']._serialized_end=29754 - _globals['_SERVINGDOCSACCESSINFO']._serialized_start=29756 - _globals['_SERVINGDOCSACCESSINFO']._serialized_end=29793 - _globals['_SERVINGDOCSACCESSINFOMSG']._serialized_start=29795 - _globals['_SERVINGDOCSACCESSINFOMSG']._serialized_end=29913 - _globals['_SERVINGDOCSEXITINFO']._serialized_start=29915 - _globals['_SERVINGDOCSEXITINFO']._serialized_end=29936 - _globals['_SERVINGDOCSEXITINFOMSG']._serialized_start=29938 - _globals['_SERVINGDOCSEXITINFOMSG']._serialized_end=30052 - _globals['_RUNRESULTWARNING']._serialized_start=30054 - _globals['_RUNRESULTWARNING']._serialized_end=30170 - _globals['_RUNRESULTWARNINGMSG']._serialized_start=30172 - _globals['_RUNRESULTWARNINGMSG']._serialized_end=30280 - _globals['_RUNRESULTFAILURE']._serialized_start=30282 - _globals['_RUNRESULTFAILURE']._serialized_end=30398 - _globals['_RUNRESULTFAILUREMSG']._serialized_start=30400 - _globals['_RUNRESULTFAILUREMSG']._serialized_end=30508 - _globals['_STATSLINE']._serialized_start=30510 - _globals['_STATSLINE']._serialized_end=30617 - _globals['_STATSLINE_STATSENTRY']._serialized_start=30573 - _globals['_STATSLINE_STATSENTRY']._serialized_end=30617 - _globals['_STATSLINEMSG']._serialized_start=30619 - _globals['_STATSLINEMSG']._serialized_end=30713 - _globals['_RUNRESULTERROR']._serialized_start=30715 - _globals['_RUNRESULTERROR']._serialized_end=30786 - _globals['_RUNRESULTERRORMSG']._serialized_start=30788 - _globals['_RUNRESULTERRORMSG']._serialized_end=30892 - _globals['_RUNRESULTERRORNOMESSAGE']._serialized_start=30894 - 
_globals['_RUNRESULTERRORNOMESSAGE']._serialized_end=30977 - _globals['_RUNRESULTERRORNOMESSAGEMSG']._serialized_start=30979 - _globals['_RUNRESULTERRORNOMESSAGEMSG']._serialized_end=31101 - _globals['_SQLCOMPILEDPATH']._serialized_start=31103 - _globals['_SQLCOMPILEDPATH']._serialized_end=31176 - _globals['_SQLCOMPILEDPATHMSG']._serialized_start=31178 - _globals['_SQLCOMPILEDPATHMSG']._serialized_end=31284 - _globals['_CHECKNODETESTFAILURE']._serialized_start=31286 - _globals['_CHECKNODETESTFAILURE']._serialized_end=31373 - _globals['_CHECKNODETESTFAILUREMSG']._serialized_start=31375 - _globals['_CHECKNODETESTFAILUREMSG']._serialized_end=31491 - _globals['_ENDOFRUNSUMMARY']._serialized_start=31493 - _globals['_ENDOFRUNSUMMARY']._serialized_end=31580 - _globals['_ENDOFRUNSUMMARYMSG']._serialized_start=31582 - _globals['_ENDOFRUNSUMMARYMSG']._serialized_end=31688 - _globals['_LOGSKIPBECAUSEERROR']._serialized_start=31690 - _globals['_LOGSKIPBECAUSEERROR']._serialized_end=31775 - _globals['_LOGSKIPBECAUSEERRORMSG']._serialized_start=31777 - _globals['_LOGSKIPBECAUSEERRORMSG']._serialized_end=31891 - _globals['_ENSUREGITINSTALLED']._serialized_start=31893 - _globals['_ENSUREGITINSTALLED']._serialized_end=31913 - _globals['_ENSUREGITINSTALLEDMSG']._serialized_start=31915 - _globals['_ENSUREGITINSTALLEDMSG']._serialized_end=32027 - _globals['_DEPSCREATINGLOCALSYMLINK']._serialized_start=32029 - _globals['_DEPSCREATINGLOCALSYMLINK']._serialized_end=32055 - _globals['_DEPSCREATINGLOCALSYMLINKMSG']._serialized_start=32057 - _globals['_DEPSCREATINGLOCALSYMLINKMSG']._serialized_end=32181 - _globals['_DEPSSYMLINKNOTAVAILABLE']._serialized_start=32183 - _globals['_DEPSSYMLINKNOTAVAILABLE']._serialized_end=32208 - _globals['_DEPSSYMLINKNOTAVAILABLEMSG']._serialized_start=32210 - _globals['_DEPSSYMLINKNOTAVAILABLEMSG']._serialized_end=32332 - _globals['_DISABLETRACKING']._serialized_start=32334 - _globals['_DISABLETRACKING']._serialized_end=32351 - 
_globals['_DISABLETRACKINGMSG']._serialized_start=32353 - _globals['_DISABLETRACKINGMSG']._serialized_end=32459 - _globals['_SENDINGEVENT']._serialized_start=32461 - _globals['_SENDINGEVENT']._serialized_end=32491 - _globals['_SENDINGEVENTMSG']._serialized_start=32493 - _globals['_SENDINGEVENTMSG']._serialized_end=32593 - _globals['_SENDEVENTFAILURE']._serialized_start=32595 - _globals['_SENDEVENTFAILURE']._serialized_end=32613 - _globals['_SENDEVENTFAILUREMSG']._serialized_start=32615 - _globals['_SENDEVENTFAILUREMSG']._serialized_end=32723 - _globals['_FLUSHEVENTS']._serialized_start=32725 - _globals['_FLUSHEVENTS']._serialized_end=32738 - _globals['_FLUSHEVENTSMSG']._serialized_start=32740 - _globals['_FLUSHEVENTSMSG']._serialized_end=32838 - _globals['_FLUSHEVENTSFAILURE']._serialized_start=32840 - _globals['_FLUSHEVENTSFAILURE']._serialized_end=32860 - _globals['_FLUSHEVENTSFAILUREMSG']._serialized_start=32862 - _globals['_FLUSHEVENTSFAILUREMSG']._serialized_end=32974 - _globals['_TRACKINGINITIALIZEFAILURE']._serialized_start=32976 - _globals['_TRACKINGINITIALIZEFAILURE']._serialized_end=33021 - _globals['_TRACKINGINITIALIZEFAILUREMSG']._serialized_start=33023 - _globals['_TRACKINGINITIALIZEFAILUREMSG']._serialized_end=33149 - _globals['_RUNRESULTWARNINGMESSAGE']._serialized_start=33151 - _globals['_RUNRESULTWARNINGMESSAGE']._serialized_end=33231 - _globals['_RUNRESULTWARNINGMESSAGEMSG']._serialized_start=33233 - _globals['_RUNRESULTWARNINGMESSAGEMSG']._serialized_end=33355 - _globals['_DEBUGCMDOUT']._serialized_start=33357 - _globals['_DEBUGCMDOUT']._serialized_end=33383 - _globals['_DEBUGCMDOUTMSG']._serialized_start=33385 - _globals['_DEBUGCMDOUTMSG']._serialized_end=33483 - _globals['_DEBUGCMDRESULT']._serialized_start=33485 - _globals['_DEBUGCMDRESULT']._serialized_end=33514 - _globals['_DEBUGCMDRESULTMSG']._serialized_start=33516 - _globals['_DEBUGCMDRESULTMSG']._serialized_end=33620 - _globals['_LISTCMDOUT']._serialized_start=33622 - 
_globals['_LISTCMDOUT']._serialized_end=33647 - _globals['_LISTCMDOUTMSG']._serialized_start=33649 - _globals['_LISTCMDOUTMSG']._serialized_end=33745 - _globals['_RESOURCEREPORT']._serialized_start=33748 - _globals['_RESOURCEREPORT']._serialized_end=33984 - _globals['_RESOURCEREPORTMSG']._serialized_start=33986 - _globals['_RESOURCEREPORTMSG']._serialized_end=34090 + _globals['_SPACESINRESOURCENAMEDEPRECATION']._serialized_start=6713 + _globals['_SPACESINRESOURCENAMEDEPRECATION']._serialized_end=6780 + _globals['_SPACESINRESOURCENAMEDEPRECATIONMSG']._serialized_start=6783 + _globals['_SPACESINRESOURCENAMEDEPRECATIONMSG']._serialized_end=6921 + _globals['_RESOURCENAMESWITHSPACESDEPRECATION']._serialized_start=6923 + _globals['_RESOURCENAMESWITHSPACESDEPRECATION']._serialized_end=7028 + _globals['_RESOURCENAMESWITHSPACESDEPRECATIONMSG']._serialized_start=7031 + _globals['_RESOURCENAMESWITHSPACESDEPRECATIONMSG']._serialized_end=7175 + _globals['_PACKAGEMATERIALIZATIONOVERRIDEDEPRECATION']._serialized_start=7177 + _globals['_PACKAGEMATERIALIZATIONOVERRIDEDEPRECATION']._serialized_end=7272 + _globals['_PACKAGEMATERIALIZATIONOVERRIDEDEPRECATIONMSG']._serialized_start=7275 + _globals['_PACKAGEMATERIALIZATIONOVERRIDEDEPRECATIONMSG']._serialized_end=7433 + _globals['_SOURCEFRESHNESSPROJECTHOOKSNOTRUN']._serialized_start=7435 + _globals['_SOURCEFRESHNESSPROJECTHOOKSNOTRUN']._serialized_end=7470 + _globals['_SOURCEFRESHNESSPROJECTHOOKSNOTRUNMSG']._serialized_start=7473 + _globals['_SOURCEFRESHNESSPROJECTHOOKSNOTRUNMSG']._serialized_end=7615 + _globals['_DEPRECATEDMODEL']._serialized_start=7617 + _globals['_DEPRECATEDMODEL']._serialized_end=7703 + _globals['_DEPRECATEDMODELMSG']._serialized_start=7705 + _globals['_DEPRECATEDMODELMSG']._serialized_end=7811 + _globals['_INPUTFILEDIFFERROR']._serialized_start=7813 + _globals['_INPUTFILEDIFFERROR']._serialized_end=7868 + _globals['_INPUTFILEDIFFERRORMSG']._serialized_start=7870 + 
_globals['_INPUTFILEDIFFERRORMSG']._serialized_end=7982 + _globals['_INVALIDVALUEFORFIELD']._serialized_start=7984 + _globals['_INVALIDVALUEFORFIELD']._serialized_end=8047 + _globals['_INVALIDVALUEFORFIELDMSG']._serialized_start=8049 + _globals['_INVALIDVALUEFORFIELDMSG']._serialized_end=8165 + _globals['_VALIDATIONWARNING']._serialized_start=8167 + _globals['_VALIDATIONWARNING']._serialized_end=8248 + _globals['_VALIDATIONWARNINGMSG']._serialized_start=8250 + _globals['_VALIDATIONWARNINGMSG']._serialized_end=8360 + _globals['_PARSEPERFINFOPATH']._serialized_start=8362 + _globals['_PARSEPERFINFOPATH']._serialized_end=8395 + _globals['_PARSEPERFINFOPATHMSG']._serialized_start=8397 + _globals['_PARSEPERFINFOPATHMSG']._serialized_end=8507 + _globals['_PARTIALPARSINGERRORPROCESSINGFILE']._serialized_start=8509 + _globals['_PARTIALPARSINGERRORPROCESSINGFILE']._serialized_end=8558 + _globals['_PARTIALPARSINGERRORPROCESSINGFILEMSG']._serialized_start=8561 + _globals['_PARTIALPARSINGERRORPROCESSINGFILEMSG']._serialized_end=8703 + _globals['_PARTIALPARSINGERROR']._serialized_start=8706 + _globals['_PARTIALPARSINGERROR']._serialized_end=8840 + _globals['_PARTIALPARSINGERROR_EXCINFOENTRY']._serialized_start=8794 + _globals['_PARTIALPARSINGERROR_EXCINFOENTRY']._serialized_end=8840 + _globals['_PARTIALPARSINGERRORMSG']._serialized_start=8842 + _globals['_PARTIALPARSINGERRORMSG']._serialized_end=8956 + _globals['_PARTIALPARSINGSKIPPARSING']._serialized_start=8958 + _globals['_PARTIALPARSINGSKIPPARSING']._serialized_end=8985 + _globals['_PARTIALPARSINGSKIPPARSINGMSG']._serialized_start=8987 + _globals['_PARTIALPARSINGSKIPPARSINGMSG']._serialized_end=9113 + _globals['_UNABLETOPARTIALPARSE']._serialized_start=9115 + _globals['_UNABLETOPARTIALPARSE']._serialized_end=9153 + _globals['_UNABLETOPARTIALPARSEMSG']._serialized_start=9155 + _globals['_UNABLETOPARTIALPARSEMSG']._serialized_end=9271 + _globals['_STATECHECKVARSHASH']._serialized_start=9273 + 
_globals['_STATECHECKVARSHASH']._serialized_end=9375 + _globals['_STATECHECKVARSHASHMSG']._serialized_start=9377 + _globals['_STATECHECKVARSHASHMSG']._serialized_end=9489 + _globals['_PARTIALPARSINGNOTENABLED']._serialized_start=9491 + _globals['_PARTIALPARSINGNOTENABLED']._serialized_end=9517 + _globals['_PARTIALPARSINGNOTENABLEDMSG']._serialized_start=9519 + _globals['_PARTIALPARSINGNOTENABLEDMSG']._serialized_end=9643 + _globals['_PARSEDFILELOADFAILED']._serialized_start=9645 + _globals['_PARSEDFILELOADFAILED']._serialized_end=9712 + _globals['_PARSEDFILELOADFAILEDMSG']._serialized_start=9714 + _globals['_PARSEDFILELOADFAILEDMSG']._serialized_end=9830 + _globals['_PARTIALPARSINGENABLED']._serialized_start=9832 + _globals['_PARTIALPARSINGENABLED']._serialized_end=9904 + _globals['_PARTIALPARSINGENABLEDMSG']._serialized_start=9906 + _globals['_PARTIALPARSINGENABLEDMSG']._serialized_end=10024 + _globals['_PARTIALPARSINGFILE']._serialized_start=10026 + _globals['_PARTIALPARSINGFILE']._serialized_end=10082 + _globals['_PARTIALPARSINGFILEMSG']._serialized_start=10084 + _globals['_PARTIALPARSINGFILEMSG']._serialized_end=10196 + _globals['_INVALIDDISABLEDTARGETINTESTNODE']._serialized_start=10199 + _globals['_INVALIDDISABLEDTARGETINTESTNODE']._serialized_end=10374 + _globals['_INVALIDDISABLEDTARGETINTESTNODEMSG']._serialized_start=10377 + _globals['_INVALIDDISABLEDTARGETINTESTNODEMSG']._serialized_end=10515 + _globals['_UNUSEDRESOURCECONFIGPATH']._serialized_start=10517 + _globals['_UNUSEDRESOURCECONFIGPATH']._serialized_end=10572 + _globals['_UNUSEDRESOURCECONFIGPATHMSG']._serialized_start=10574 + _globals['_UNUSEDRESOURCECONFIGPATHMSG']._serialized_end=10698 + _globals['_SEEDINCREASED']._serialized_start=10700 + _globals['_SEEDINCREASED']._serialized_end=10751 + _globals['_SEEDINCREASEDMSG']._serialized_start=10753 + _globals['_SEEDINCREASEDMSG']._serialized_end=10855 + _globals['_SEEDEXCEEDSLIMITSAMEPATH']._serialized_start=10857 + 
_globals['_SEEDEXCEEDSLIMITSAMEPATH']._serialized_end=10919 + _globals['_SEEDEXCEEDSLIMITSAMEPATHMSG']._serialized_start=10921 + _globals['_SEEDEXCEEDSLIMITSAMEPATHMSG']._serialized_end=11045 + _globals['_SEEDEXCEEDSLIMITANDPATHCHANGED']._serialized_start=11047 + _globals['_SEEDEXCEEDSLIMITANDPATHCHANGED']._serialized_end=11115 + _globals['_SEEDEXCEEDSLIMITANDPATHCHANGEDMSG']._serialized_start=11118 + _globals['_SEEDEXCEEDSLIMITANDPATHCHANGEDMSG']._serialized_end=11254 + _globals['_SEEDEXCEEDSLIMITCHECKSUMCHANGED']._serialized_start=11256 + _globals['_SEEDEXCEEDSLIMITCHECKSUMCHANGED']._serialized_end=11348 + _globals['_SEEDEXCEEDSLIMITCHECKSUMCHANGEDMSG']._serialized_start=11351 + _globals['_SEEDEXCEEDSLIMITCHECKSUMCHANGEDMSG']._serialized_end=11489 + _globals['_UNUSEDTABLES']._serialized_start=11491 + _globals['_UNUSEDTABLES']._serialized_end=11528 + _globals['_UNUSEDTABLESMSG']._serialized_start=11530 + _globals['_UNUSEDTABLESMSG']._serialized_end=11630 + _globals['_WRONGRESOURCESCHEMAFILE']._serialized_start=11633 + _globals['_WRONGRESOURCESCHEMAFILE']._serialized_end=11768 + _globals['_WRONGRESOURCESCHEMAFILEMSG']._serialized_start=11770 + _globals['_WRONGRESOURCESCHEMAFILEMSG']._serialized_end=11892 + _globals['_NONODEFORYAMLKEY']._serialized_start=11894 + _globals['_NONODEFORYAMLKEY']._serialized_end=11969 + _globals['_NONODEFORYAMLKEYMSG']._serialized_start=11971 + _globals['_NONODEFORYAMLKEYMSG']._serialized_end=12079 + _globals['_MACRONOTFOUNDFORPATCH']._serialized_start=12081 + _globals['_MACRONOTFOUNDFORPATCH']._serialized_end=12124 + _globals['_MACRONOTFOUNDFORPATCHMSG']._serialized_start=12126 + _globals['_MACRONOTFOUNDFORPATCHMSG']._serialized_end=12244 + _globals['_NODENOTFOUNDORDISABLED']._serialized_start=12247 + _globals['_NODENOTFOUNDORDISABLED']._serialized_end=12431 + _globals['_NODENOTFOUNDORDISABLEDMSG']._serialized_start=12433 + _globals['_NODENOTFOUNDORDISABLEDMSG']._serialized_end=12553 + 
_globals['_JINJALOGWARNING']._serialized_start=12555 + _globals['_JINJALOGWARNING']._serialized_end=12627 + _globals['_JINJALOGWARNINGMSG']._serialized_start=12629 + _globals['_JINJALOGWARNINGMSG']._serialized_end=12735 + _globals['_JINJALOGINFO']._serialized_start=12737 + _globals['_JINJALOGINFO']._serialized_end=12806 + _globals['_JINJALOGINFOMSG']._serialized_start=12808 + _globals['_JINJALOGINFOMSG']._serialized_end=12908 + _globals['_JINJALOGDEBUG']._serialized_start=12910 + _globals['_JINJALOGDEBUG']._serialized_end=12980 + _globals['_JINJALOGDEBUGMSG']._serialized_start=12982 + _globals['_JINJALOGDEBUGMSG']._serialized_end=13084 + _globals['_UNPINNEDREFNEWVERSIONAVAILABLE']._serialized_start=13087 + _globals['_UNPINNEDREFNEWVERSIONAVAILABLE']._serialized_end=13261 + _globals['_UNPINNEDREFNEWVERSIONAVAILABLEMSG']._serialized_start=13264 + _globals['_UNPINNEDREFNEWVERSIONAVAILABLEMSG']._serialized_end=13400 + _globals['_UPCOMINGREFERENCEDEPRECATION']._serialized_start=13403 + _globals['_UPCOMINGREFERENCEDEPRECATION']._serialized_end=13601 + _globals['_UPCOMINGREFERENCEDEPRECATIONMSG']._serialized_start=13604 + _globals['_UPCOMINGREFERENCEDEPRECATIONMSG']._serialized_end=13736 + _globals['_DEPRECATEDREFERENCE']._serialized_start=13739 + _globals['_DEPRECATEDREFERENCE']._serialized_end=13928 + _globals['_DEPRECATEDREFERENCEMSG']._serialized_start=13930 + _globals['_DEPRECATEDREFERENCEMSG']._serialized_end=14044 + _globals['_UNSUPPORTEDCONSTRAINTMATERIALIZATION']._serialized_start=14046 + _globals['_UNSUPPORTEDCONSTRAINTMATERIALIZATION']._serialized_end=14106 + _globals['_UNSUPPORTEDCONSTRAINTMATERIALIZATIONMSG']._serialized_start=14109 + _globals['_UNSUPPORTEDCONSTRAINTMATERIALIZATIONMSG']._serialized_end=14257 + _globals['_PARSEINLINENODEERROR']._serialized_start=14259 + _globals['_PARSEINLINENODEERROR']._serialized_end=14336 + _globals['_PARSEINLINENODEERRORMSG']._serialized_start=14338 + _globals['_PARSEINLINENODEERRORMSG']._serialized_end=14454 + 
_globals['_SEMANTICVALIDATIONFAILURE']._serialized_start=14456 + _globals['_SEMANTICVALIDATIONFAILURE']._serialized_end=14496 + _globals['_SEMANTICVALIDATIONFAILUREMSG']._serialized_start=14498 + _globals['_SEMANTICVALIDATIONFAILUREMSG']._serialized_end=14624 + _globals['_UNVERSIONEDBREAKINGCHANGE']._serialized_start=14627 + _globals['_UNVERSIONEDBREAKINGCHANGE']._serialized_end=15021 + _globals['_UNVERSIONEDBREAKINGCHANGEMSG']._serialized_start=15023 + _globals['_UNVERSIONEDBREAKINGCHANGEMSG']._serialized_end=15149 + _globals['_WARNSTATETARGETEQUAL']._serialized_start=15151 + _globals['_WARNSTATETARGETEQUAL']._serialized_end=15193 + _globals['_WARNSTATETARGETEQUALMSG']._serialized_start=15195 + _globals['_WARNSTATETARGETEQUALMSG']._serialized_end=15311 + _globals['_FRESHNESSCONFIGPROBLEM']._serialized_start=15313 + _globals['_FRESHNESSCONFIGPROBLEM']._serialized_end=15350 + _globals['_FRESHNESSCONFIGPROBLEMMSG']._serialized_start=15352 + _globals['_FRESHNESSCONFIGPROBLEMMSG']._serialized_end=15472 + _globals['_GITSPARSECHECKOUTSUBDIRECTORY']._serialized_start=15474 + _globals['_GITSPARSECHECKOUTSUBDIRECTORY']._serialized_end=15521 + _globals['_GITSPARSECHECKOUTSUBDIRECTORYMSG']._serialized_start=15524 + _globals['_GITSPARSECHECKOUTSUBDIRECTORYMSG']._serialized_end=15658 + _globals['_GITPROGRESSCHECKOUTREVISION']._serialized_start=15660 + _globals['_GITPROGRESSCHECKOUTREVISION']._serialized_end=15707 + _globals['_GITPROGRESSCHECKOUTREVISIONMSG']._serialized_start=15710 + _globals['_GITPROGRESSCHECKOUTREVISIONMSG']._serialized_end=15840 + _globals['_GITPROGRESSUPDATINGEXISTINGDEPENDENCY']._serialized_start=15842 + _globals['_GITPROGRESSUPDATINGEXISTINGDEPENDENCY']._serialized_end=15894 + _globals['_GITPROGRESSUPDATINGEXISTINGDEPENDENCYMSG']._serialized_start=15897 + _globals['_GITPROGRESSUPDATINGEXISTINGDEPENDENCYMSG']._serialized_end=16047 + _globals['_GITPROGRESSPULLINGNEWDEPENDENCY']._serialized_start=16049 + 
_globals['_GITPROGRESSPULLINGNEWDEPENDENCY']._serialized_end=16095 + _globals['_GITPROGRESSPULLINGNEWDEPENDENCYMSG']._serialized_start=16098 + _globals['_GITPROGRESSPULLINGNEWDEPENDENCYMSG']._serialized_end=16236 + _globals['_GITNOTHINGTODO']._serialized_start=16238 + _globals['_GITNOTHINGTODO']._serialized_end=16267 + _globals['_GITNOTHINGTODOMSG']._serialized_start=16269 + _globals['_GITNOTHINGTODOMSG']._serialized_end=16373 + _globals['_GITPROGRESSUPDATEDCHECKOUTRANGE']._serialized_start=16375 + _globals['_GITPROGRESSUPDATEDCHECKOUTRANGE']._serialized_end=16444 + _globals['_GITPROGRESSUPDATEDCHECKOUTRANGEMSG']._serialized_start=16447 + _globals['_GITPROGRESSUPDATEDCHECKOUTRANGEMSG']._serialized_end=16585 + _globals['_GITPROGRESSCHECKEDOUTAT']._serialized_start=16587 + _globals['_GITPROGRESSCHECKEDOUTAT']._serialized_end=16629 + _globals['_GITPROGRESSCHECKEDOUTATMSG']._serialized_start=16631 + _globals['_GITPROGRESSCHECKEDOUTATMSG']._serialized_end=16753 + _globals['_REGISTRYPROGRESSGETREQUEST']._serialized_start=16755 + _globals['_REGISTRYPROGRESSGETREQUEST']._serialized_end=16796 + _globals['_REGISTRYPROGRESSGETREQUESTMSG']._serialized_start=16799 + _globals['_REGISTRYPROGRESSGETREQUESTMSG']._serialized_end=16927 + _globals['_REGISTRYPROGRESSGETRESPONSE']._serialized_start=16929 + _globals['_REGISTRYPROGRESSGETRESPONSE']._serialized_end=16990 + _globals['_REGISTRYPROGRESSGETRESPONSEMSG']._serialized_start=16993 + _globals['_REGISTRYPROGRESSGETRESPONSEMSG']._serialized_end=17123 + _globals['_SELECTORREPORTINVALIDSELECTOR']._serialized_start=17125 + _globals['_SELECTORREPORTINVALIDSELECTOR']._serialized_end=17220 + _globals['_SELECTORREPORTINVALIDSELECTORMSG']._serialized_start=17223 + _globals['_SELECTORREPORTINVALIDSELECTORMSG']._serialized_end=17357 + _globals['_DEPSNOPACKAGESFOUND']._serialized_start=17359 + _globals['_DEPSNOPACKAGESFOUND']._serialized_end=17380 + _globals['_DEPSNOPACKAGESFOUNDMSG']._serialized_start=17382 + 
_globals['_DEPSNOPACKAGESFOUNDMSG']._serialized_end=17496 + _globals['_DEPSSTARTPACKAGEINSTALL']._serialized_start=17498 + _globals['_DEPSSTARTPACKAGEINSTALL']._serialized_end=17545 + _globals['_DEPSSTARTPACKAGEINSTALLMSG']._serialized_start=17547 + _globals['_DEPSSTARTPACKAGEINSTALLMSG']._serialized_end=17669 + _globals['_DEPSINSTALLINFO']._serialized_start=17671 + _globals['_DEPSINSTALLINFO']._serialized_end=17710 + _globals['_DEPSINSTALLINFOMSG']._serialized_start=17712 + _globals['_DEPSINSTALLINFOMSG']._serialized_end=17818 + _globals['_DEPSUPDATEAVAILABLE']._serialized_start=17820 + _globals['_DEPSUPDATEAVAILABLE']._serialized_end=17865 + _globals['_DEPSUPDATEAVAILABLEMSG']._serialized_start=17867 + _globals['_DEPSUPDATEAVAILABLEMSG']._serialized_end=17981 + _globals['_DEPSUPTODATE']._serialized_start=17983 + _globals['_DEPSUPTODATE']._serialized_end=17997 + _globals['_DEPSUPTODATEMSG']._serialized_start=17999 + _globals['_DEPSUPTODATEMSG']._serialized_end=18099 + _globals['_DEPSLISTSUBDIRECTORY']._serialized_start=18101 + _globals['_DEPSLISTSUBDIRECTORY']._serialized_end=18145 + _globals['_DEPSLISTSUBDIRECTORYMSG']._serialized_start=18147 + _globals['_DEPSLISTSUBDIRECTORYMSG']._serialized_end=18263 + _globals['_DEPSNOTIFYUPDATESAVAILABLE']._serialized_start=18265 + _globals['_DEPSNOTIFYUPDATESAVAILABLE']._serialized_end=18311 + _globals['_DEPSNOTIFYUPDATESAVAILABLEMSG']._serialized_start=18314 + _globals['_DEPSNOTIFYUPDATESAVAILABLEMSG']._serialized_end=18442 + _globals['_REGISTRYINDEXPROGRESSGETREQUEST']._serialized_start=18444 + _globals['_REGISTRYINDEXPROGRESSGETREQUEST']._serialized_end=18490 + _globals['_REGISTRYINDEXPROGRESSGETREQUESTMSG']._serialized_start=18493 + _globals['_REGISTRYINDEXPROGRESSGETREQUESTMSG']._serialized_end=18631 + _globals['_REGISTRYINDEXPROGRESSGETRESPONSE']._serialized_start=18633 + _globals['_REGISTRYINDEXPROGRESSGETRESPONSE']._serialized_end=18699 + _globals['_REGISTRYINDEXPROGRESSGETRESPONSEMSG']._serialized_start=18702 + 
_globals['_REGISTRYINDEXPROGRESSGETRESPONSEMSG']._serialized_end=18842 + _globals['_REGISTRYRESPONSEUNEXPECTEDTYPE']._serialized_start=18844 + _globals['_REGISTRYRESPONSEUNEXPECTEDTYPE']._serialized_end=18894 + _globals['_REGISTRYRESPONSEUNEXPECTEDTYPEMSG']._serialized_start=18897 + _globals['_REGISTRYRESPONSEUNEXPECTEDTYPEMSG']._serialized_end=19033 + _globals['_REGISTRYRESPONSEMISSINGTOPKEYS']._serialized_start=19035 + _globals['_REGISTRYRESPONSEMISSINGTOPKEYS']._serialized_end=19085 + _globals['_REGISTRYRESPONSEMISSINGTOPKEYSMSG']._serialized_start=19088 + _globals['_REGISTRYRESPONSEMISSINGTOPKEYSMSG']._serialized_end=19224 + _globals['_REGISTRYRESPONSEMISSINGNESTEDKEYS']._serialized_start=19226 + _globals['_REGISTRYRESPONSEMISSINGNESTEDKEYS']._serialized_end=19279 + _globals['_REGISTRYRESPONSEMISSINGNESTEDKEYSMSG']._serialized_start=19282 + _globals['_REGISTRYRESPONSEMISSINGNESTEDKEYSMSG']._serialized_end=19424 + _globals['_REGISTRYRESPONSEEXTRANESTEDKEYS']._serialized_start=19426 + _globals['_REGISTRYRESPONSEEXTRANESTEDKEYS']._serialized_end=19477 + _globals['_REGISTRYRESPONSEEXTRANESTEDKEYSMSG']._serialized_start=19480 + _globals['_REGISTRYRESPONSEEXTRANESTEDKEYSMSG']._serialized_end=19618 + _globals['_DEPSSETDOWNLOADDIRECTORY']._serialized_start=19620 + _globals['_DEPSSETDOWNLOADDIRECTORY']._serialized_end=19660 + _globals['_DEPSSETDOWNLOADDIRECTORYMSG']._serialized_start=19662 + _globals['_DEPSSETDOWNLOADDIRECTORYMSG']._serialized_end=19786 + _globals['_DEPSUNPINNED']._serialized_start=19788 + _globals['_DEPSUNPINNED']._serialized_end=19833 + _globals['_DEPSUNPINNEDMSG']._serialized_start=19835 + _globals['_DEPSUNPINNEDMSG']._serialized_end=19935 + _globals['_NONODESFORSELECTIONCRITERIA']._serialized_start=19937 + _globals['_NONODESFORSELECTIONCRITERIA']._serialized_end=19984 + _globals['_NONODESFORSELECTIONCRITERIAMSG']._serialized_start=19987 + _globals['_NONODESFORSELECTIONCRITERIAMSG']._serialized_end=20117 + 
_globals['_DEPSLOCKUPDATING']._serialized_start=20119 + _globals['_DEPSLOCKUPDATING']._serialized_end=20160 + _globals['_DEPSLOCKUPDATINGMSG']._serialized_start=20162 + _globals['_DEPSLOCKUPDATINGMSG']._serialized_end=20270 + _globals['_DEPSADDPACKAGE']._serialized_start=20272 + _globals['_DEPSADDPACKAGE']._serialized_end=20354 + _globals['_DEPSADDPACKAGEMSG']._serialized_start=20356 + _globals['_DEPSADDPACKAGEMSG']._serialized_end=20460 + _globals['_DEPSFOUNDDUPLICATEPACKAGE']._serialized_start=20463 + _globals['_DEPSFOUNDDUPLICATEPACKAGE']._serialized_end=20630 + _globals['_DEPSFOUNDDUPLICATEPACKAGE_REMOVEDPACKAGEENTRY']._serialized_start=20577 + _globals['_DEPSFOUNDDUPLICATEPACKAGE_REMOVEDPACKAGEENTRY']._serialized_end=20630 + _globals['_DEPSFOUNDDUPLICATEPACKAGEMSG']._serialized_start=20632 + _globals['_DEPSFOUNDDUPLICATEPACKAGEMSG']._serialized_end=20758 + _globals['_DEPSVERSIONMISSING']._serialized_start=20760 + _globals['_DEPSVERSIONMISSING']._serialized_end=20796 + _globals['_DEPSVERSIONMISSINGMSG']._serialized_start=20798 + _globals['_DEPSVERSIONMISSINGMSG']._serialized_end=20910 + _globals['_DEPSSCRUBBEDPACKAGENAME']._serialized_start=20912 + _globals['_DEPSSCRUBBEDPACKAGENAME']._serialized_end=20959 + _globals['_DEPSSCRUBBEDPACKAGENAMEMSG']._serialized_start=20961 + _globals['_DEPSSCRUBBEDPACKAGENAMEMSG']._serialized_end=21083 + _globals['_RUNNINGOPERATIONCAUGHTERROR']._serialized_start=21085 + _globals['_RUNNINGOPERATIONCAUGHTERROR']._serialized_end=21127 + _globals['_RUNNINGOPERATIONCAUGHTERRORMSG']._serialized_start=21130 + _globals['_RUNNINGOPERATIONCAUGHTERRORMSG']._serialized_end=21260 + _globals['_COMPILECOMPLETE']._serialized_start=21262 + _globals['_COMPILECOMPLETE']._serialized_end=21279 + _globals['_COMPILECOMPLETEMSG']._serialized_start=21281 + _globals['_COMPILECOMPLETEMSG']._serialized_end=21387 + _globals['_FRESHNESSCHECKCOMPLETE']._serialized_start=21389 + _globals['_FRESHNESSCHECKCOMPLETE']._serialized_end=21413 + 
_globals['_FRESHNESSCHECKCOMPLETEMSG']._serialized_start=21415 + _globals['_FRESHNESSCHECKCOMPLETEMSG']._serialized_end=21535 + _globals['_SEEDHEADER']._serialized_start=21537 + _globals['_SEEDHEADER']._serialized_end=21565 + _globals['_SEEDHEADERMSG']._serialized_start=21567 + _globals['_SEEDHEADERMSG']._serialized_end=21663 + _globals['_SQLRUNNEREXCEPTION']._serialized_start=21665 + _globals['_SQLRUNNEREXCEPTION']._serialized_end=21758 + _globals['_SQLRUNNEREXCEPTIONMSG']._serialized_start=21760 + _globals['_SQLRUNNEREXCEPTIONMSG']._serialized_end=21872 + _globals['_LOGTESTRESULT']._serialized_start=21875 + _globals['_LOGTESTRESULT']._serialized_end=22043 + _globals['_LOGTESTRESULTMSG']._serialized_start=22045 + _globals['_LOGTESTRESULTMSG']._serialized_end=22147 + _globals['_LOGSTARTLINE']._serialized_start=22149 + _globals['_LOGSTARTLINE']._serialized_end=22256 + _globals['_LOGSTARTLINEMSG']._serialized_start=22258 + _globals['_LOGSTARTLINEMSG']._serialized_end=22358 + _globals['_LOGMODELRESULT']._serialized_start=22361 + _globals['_LOGMODELRESULT']._serialized_end=22510 + _globals['_LOGMODELRESULTMSG']._serialized_start=22512 + _globals['_LOGMODELRESULTMSG']._serialized_end=22616 + _globals['_LOGSNAPSHOTRESULT']._serialized_start=22619 + _globals['_LOGSNAPSHOTRESULT']._serialized_end=22893 + _globals['_LOGSNAPSHOTRESULT_CFGENTRY']._serialized_start=22851 + _globals['_LOGSNAPSHOTRESULT_CFGENTRY']._serialized_end=22893 + _globals['_LOGSNAPSHOTRESULTMSG']._serialized_start=22895 + _globals['_LOGSNAPSHOTRESULTMSG']._serialized_end=23005 + _globals['_LOGSEEDRESULT']._serialized_start=23008 + _globals['_LOGSEEDRESULT']._serialized_end=23193 + _globals['_LOGSEEDRESULTMSG']._serialized_start=23195 + _globals['_LOGSEEDRESULTMSG']._serialized_end=23297 + _globals['_LOGFRESHNESSRESULT']._serialized_start=23300 + _globals['_LOGFRESHNESSRESULT']._serialized_end=23473 + _globals['_LOGFRESHNESSRESULTMSG']._serialized_start=23475 + 
_globals['_LOGFRESHNESSRESULTMSG']._serialized_end=23587 + _globals['_LOGNODENOOPRESULT']._serialized_start=23590 + _globals['_LOGNODENOOPRESULT']._serialized_end=23742 + _globals['_LOGNODENOOPRESULTMSG']._serialized_start=23744 + _globals['_LOGNODENOOPRESULTMSG']._serialized_end=23854 + _globals['_LOGCANCELLINE']._serialized_start=23856 + _globals['_LOGCANCELLINE']._serialized_end=23890 + _globals['_LOGCANCELLINEMSG']._serialized_start=23892 + _globals['_LOGCANCELLINEMSG']._serialized_end=23994 + _globals['_DEFAULTSELECTOR']._serialized_start=23996 + _globals['_DEFAULTSELECTOR']._serialized_end=24027 + _globals['_DEFAULTSELECTORMSG']._serialized_start=24029 + _globals['_DEFAULTSELECTORMSG']._serialized_end=24135 + _globals['_NODESTART']._serialized_start=24137 + _globals['_NODESTART']._serialized_end=24190 + _globals['_NODESTARTMSG']._serialized_start=24192 + _globals['_NODESTARTMSG']._serialized_end=24286 + _globals['_NODEFINISHED']._serialized_start=24288 + _globals['_NODEFINISHED']._serialized_end=24391 + _globals['_NODEFINISHEDMSG']._serialized_start=24393 + _globals['_NODEFINISHEDMSG']._serialized_end=24493 + _globals['_QUERYCANCELATIONUNSUPPORTED']._serialized_start=24495 + _globals['_QUERYCANCELATIONUNSUPPORTED']._serialized_end=24538 + _globals['_QUERYCANCELATIONUNSUPPORTEDMSG']._serialized_start=24541 + _globals['_QUERYCANCELATIONUNSUPPORTEDMSG']._serialized_end=24671 + _globals['_CONCURRENCYLINE']._serialized_start=24673 + _globals['_CONCURRENCYLINE']._serialized_end=24752 + _globals['_CONCURRENCYLINEMSG']._serialized_start=24754 + _globals['_CONCURRENCYLINEMSG']._serialized_end=24860 + _globals['_WRITINGINJECTEDSQLFORNODE']._serialized_start=24862 + _globals['_WRITINGINJECTEDSQLFORNODE']._serialized_end=24931 + _globals['_WRITINGINJECTEDSQLFORNODEMSG']._serialized_start=24933 + _globals['_WRITINGINJECTEDSQLFORNODEMSG']._serialized_end=25059 + _globals['_NODECOMPILING']._serialized_start=25061 + _globals['_NODECOMPILING']._serialized_end=25118 + 
_globals['_NODECOMPILINGMSG']._serialized_start=25120 + _globals['_NODECOMPILINGMSG']._serialized_end=25222 + _globals['_NODEEXECUTING']._serialized_start=25224 + _globals['_NODEEXECUTING']._serialized_end=25281 + _globals['_NODEEXECUTINGMSG']._serialized_start=25283 + _globals['_NODEEXECUTINGMSG']._serialized_end=25385 + _globals['_LOGHOOKSTARTLINE']._serialized_start=25387 + _globals['_LOGHOOKSTARTLINE']._serialized_end=25496 + _globals['_LOGHOOKSTARTLINEMSG']._serialized_start=25498 + _globals['_LOGHOOKSTARTLINEMSG']._serialized_end=25606 + _globals['_LOGHOOKENDLINE']._serialized_start=25609 + _globals['_LOGHOOKENDLINE']._serialized_end=25756 + _globals['_LOGHOOKENDLINEMSG']._serialized_start=25758 + _globals['_LOGHOOKENDLINEMSG']._serialized_end=25862 + _globals['_SKIPPINGDETAILS']._serialized_start=25865 + _globals['_SKIPPINGDETAILS']._serialized_end=26012 + _globals['_SKIPPINGDETAILSMSG']._serialized_start=26014 + _globals['_SKIPPINGDETAILSMSG']._serialized_end=26120 + _globals['_NOTHINGTODO']._serialized_start=26122 + _globals['_NOTHINGTODO']._serialized_end=26135 + _globals['_NOTHINGTODOMSG']._serialized_start=26137 + _globals['_NOTHINGTODOMSG']._serialized_end=26235 + _globals['_RUNNINGOPERATIONUNCAUGHTERROR']._serialized_start=26237 + _globals['_RUNNINGOPERATIONUNCAUGHTERROR']._serialized_end=26281 + _globals['_RUNNINGOPERATIONUNCAUGHTERRORMSG']._serialized_start=26284 + _globals['_RUNNINGOPERATIONUNCAUGHTERRORMSG']._serialized_end=26418 + _globals['_ENDRUNRESULT']._serialized_start=26421 + _globals['_ENDRUNRESULT']._serialized_end=26568 + _globals['_ENDRUNRESULTMSG']._serialized_start=26570 + _globals['_ENDRUNRESULTMSG']._serialized_end=26670 + _globals['_NONODESSELECTED']._serialized_start=26672 + _globals['_NONODESSELECTED']._serialized_end=26689 + _globals['_NONODESSELECTEDMSG']._serialized_start=26691 + _globals['_NONODESSELECTEDMSG']._serialized_end=26797 + _globals['_COMMANDCOMPLETED']._serialized_start=26799 + 
_globals['_COMMANDCOMPLETED']._serialized_end=26918 + _globals['_COMMANDCOMPLETEDMSG']._serialized_start=26920 + _globals['_COMMANDCOMPLETEDMSG']._serialized_end=27028 + _globals['_SHOWNODE']._serialized_start=27030 + _globals['_SHOWNODE']._serialized_end=27137 + _globals['_SHOWNODEMSG']._serialized_start=27139 + _globals['_SHOWNODEMSG']._serialized_end=27231 + _globals['_COMPILEDNODE']._serialized_start=27233 + _globals['_COMPILEDNODE']._serialized_end=27345 + _globals['_COMPILEDNODEMSG']._serialized_start=27347 + _globals['_COMPILEDNODEMSG']._serialized_end=27447 + _globals['_CATCHABLEEXCEPTIONONRUN']._serialized_start=27449 + _globals['_CATCHABLEEXCEPTIONONRUN']._serialized_end=27547 + _globals['_CATCHABLEEXCEPTIONONRUNMSG']._serialized_start=27549 + _globals['_CATCHABLEEXCEPTIONONRUNMSG']._serialized_end=27671 + _globals['_INTERNALERRORONRUN']._serialized_start=27673 + _globals['_INTERNALERRORONRUN']._serialized_end=27768 + _globals['_INTERNALERRORONRUNMSG']._serialized_start=27770 + _globals['_INTERNALERRORONRUNMSG']._serialized_end=27882 + _globals['_GENERICEXCEPTIONONRUN']._serialized_start=27884 + _globals['_GENERICEXCEPTIONONRUN']._serialized_end=28001 + _globals['_GENERICEXCEPTIONONRUNMSG']._serialized_start=28003 + _globals['_GENERICEXCEPTIONONRUNMSG']._serialized_end=28121 + _globals['_NODECONNECTIONRELEASEERROR']._serialized_start=28123 + _globals['_NODECONNECTIONRELEASEERROR']._serialized_end=28201 + _globals['_NODECONNECTIONRELEASEERRORMSG']._serialized_start=28204 + _globals['_NODECONNECTIONRELEASEERRORMSG']._serialized_end=28332 + _globals['_FOUNDSTATS']._serialized_start=28334 + _globals['_FOUNDSTATS']._serialized_end=28365 + _globals['_FOUNDSTATSMSG']._serialized_start=28367 + _globals['_FOUNDSTATSMSG']._serialized_end=28463 + _globals['_MAINKEYBOARDINTERRUPT']._serialized_start=28465 + _globals['_MAINKEYBOARDINTERRUPT']._serialized_end=28488 + _globals['_MAINKEYBOARDINTERRUPTMSG']._serialized_start=28490 + 
_globals['_MAINKEYBOARDINTERRUPTMSG']._serialized_end=28608 + _globals['_MAINENCOUNTEREDERROR']._serialized_start=28610 + _globals['_MAINENCOUNTEREDERROR']._serialized_end=28645 + _globals['_MAINENCOUNTEREDERRORMSG']._serialized_start=28647 + _globals['_MAINENCOUNTEREDERRORMSG']._serialized_end=28763 + _globals['_MAINSTACKTRACE']._serialized_start=28765 + _globals['_MAINSTACKTRACE']._serialized_end=28802 + _globals['_MAINSTACKTRACEMSG']._serialized_start=28804 + _globals['_MAINSTACKTRACEMSG']._serialized_end=28908 + _globals['_TIMINGINFOCOLLECTED']._serialized_start=28910 + _globals['_TIMINGINFOCOLLECTED']._serialized_end=29022 + _globals['_TIMINGINFOCOLLECTEDMSG']._serialized_start=29024 + _globals['_TIMINGINFOCOLLECTEDMSG']._serialized_end=29138 + _globals['_LOGDEBUGSTACKTRACE']._serialized_start=29140 + _globals['_LOGDEBUGSTACKTRACE']._serialized_end=29178 + _globals['_LOGDEBUGSTACKTRACEMSG']._serialized_start=29180 + _globals['_LOGDEBUGSTACKTRACEMSG']._serialized_end=29292 + _globals['_CHECKCLEANPATH']._serialized_start=29294 + _globals['_CHECKCLEANPATH']._serialized_end=29324 + _globals['_CHECKCLEANPATHMSG']._serialized_start=29326 + _globals['_CHECKCLEANPATHMSG']._serialized_end=29430 + _globals['_CONFIRMCLEANPATH']._serialized_start=29432 + _globals['_CONFIRMCLEANPATH']._serialized_end=29464 + _globals['_CONFIRMCLEANPATHMSG']._serialized_start=29466 + _globals['_CONFIRMCLEANPATHMSG']._serialized_end=29574 + _globals['_PROTECTEDCLEANPATH']._serialized_start=29576 + _globals['_PROTECTEDCLEANPATH']._serialized_end=29610 + _globals['_PROTECTEDCLEANPATHMSG']._serialized_start=29612 + _globals['_PROTECTEDCLEANPATHMSG']._serialized_end=29724 + _globals['_FINISHEDCLEANPATHS']._serialized_start=29726 + _globals['_FINISHEDCLEANPATHS']._serialized_end=29746 + _globals['_FINISHEDCLEANPATHSMSG']._serialized_start=29748 + _globals['_FINISHEDCLEANPATHSMSG']._serialized_end=29860 + _globals['_OPENCOMMAND']._serialized_start=29862 + 
_globals['_OPENCOMMAND']._serialized_end=29915 + _globals['_OPENCOMMANDMSG']._serialized_start=29917 + _globals['_OPENCOMMANDMSG']._serialized_end=30015 + _globals['_SERVINGDOCSPORT']._serialized_start=30017 + _globals['_SERVINGDOCSPORT']._serialized_end=30065 + _globals['_SERVINGDOCSPORTMSG']._serialized_start=30067 + _globals['_SERVINGDOCSPORTMSG']._serialized_end=30173 + _globals['_SERVINGDOCSACCESSINFO']._serialized_start=30175 + _globals['_SERVINGDOCSACCESSINFO']._serialized_end=30212 + _globals['_SERVINGDOCSACCESSINFOMSG']._serialized_start=30214 + _globals['_SERVINGDOCSACCESSINFOMSG']._serialized_end=30332 + _globals['_SERVINGDOCSEXITINFO']._serialized_start=30334 + _globals['_SERVINGDOCSEXITINFO']._serialized_end=30355 + _globals['_SERVINGDOCSEXITINFOMSG']._serialized_start=30357 + _globals['_SERVINGDOCSEXITINFOMSG']._serialized_end=30471 + _globals['_RUNRESULTWARNING']._serialized_start=30473 + _globals['_RUNRESULTWARNING']._serialized_end=30589 + _globals['_RUNRESULTWARNINGMSG']._serialized_start=30591 + _globals['_RUNRESULTWARNINGMSG']._serialized_end=30699 + _globals['_RUNRESULTFAILURE']._serialized_start=30701 + _globals['_RUNRESULTFAILURE']._serialized_end=30817 + _globals['_RUNRESULTFAILUREMSG']._serialized_start=30819 + _globals['_RUNRESULTFAILUREMSG']._serialized_end=30927 + _globals['_STATSLINE']._serialized_start=30929 + _globals['_STATSLINE']._serialized_end=31036 + _globals['_STATSLINE_STATSENTRY']._serialized_start=30992 + _globals['_STATSLINE_STATSENTRY']._serialized_end=31036 + _globals['_STATSLINEMSG']._serialized_start=31038 + _globals['_STATSLINEMSG']._serialized_end=31132 + _globals['_RUNRESULTERROR']._serialized_start=31134 + _globals['_RUNRESULTERROR']._serialized_end=31205 + _globals['_RUNRESULTERRORMSG']._serialized_start=31207 + _globals['_RUNRESULTERRORMSG']._serialized_end=31311 + _globals['_RUNRESULTERRORNOMESSAGE']._serialized_start=31313 + _globals['_RUNRESULTERRORNOMESSAGE']._serialized_end=31396 + 
_globals['_RUNRESULTERRORNOMESSAGEMSG']._serialized_start=31398 + _globals['_RUNRESULTERRORNOMESSAGEMSG']._serialized_end=31520 + _globals['_SQLCOMPILEDPATH']._serialized_start=31522 + _globals['_SQLCOMPILEDPATH']._serialized_end=31595 + _globals['_SQLCOMPILEDPATHMSG']._serialized_start=31597 + _globals['_SQLCOMPILEDPATHMSG']._serialized_end=31703 + _globals['_CHECKNODETESTFAILURE']._serialized_start=31705 + _globals['_CHECKNODETESTFAILURE']._serialized_end=31792 + _globals['_CHECKNODETESTFAILUREMSG']._serialized_start=31794 + _globals['_CHECKNODETESTFAILUREMSG']._serialized_end=31910 + _globals['_ENDOFRUNSUMMARY']._serialized_start=31912 + _globals['_ENDOFRUNSUMMARY']._serialized_end=31999 + _globals['_ENDOFRUNSUMMARYMSG']._serialized_start=32001 + _globals['_ENDOFRUNSUMMARYMSG']._serialized_end=32107 + _globals['_LOGSKIPBECAUSEERROR']._serialized_start=32109 + _globals['_LOGSKIPBECAUSEERROR']._serialized_end=32194 + _globals['_LOGSKIPBECAUSEERRORMSG']._serialized_start=32196 + _globals['_LOGSKIPBECAUSEERRORMSG']._serialized_end=32310 + _globals['_ENSUREGITINSTALLED']._serialized_start=32312 + _globals['_ENSUREGITINSTALLED']._serialized_end=32332 + _globals['_ENSUREGITINSTALLEDMSG']._serialized_start=32334 + _globals['_ENSUREGITINSTALLEDMSG']._serialized_end=32446 + _globals['_DEPSCREATINGLOCALSYMLINK']._serialized_start=32448 + _globals['_DEPSCREATINGLOCALSYMLINK']._serialized_end=32474 + _globals['_DEPSCREATINGLOCALSYMLINKMSG']._serialized_start=32476 + _globals['_DEPSCREATINGLOCALSYMLINKMSG']._serialized_end=32600 + _globals['_DEPSSYMLINKNOTAVAILABLE']._serialized_start=32602 + _globals['_DEPSSYMLINKNOTAVAILABLE']._serialized_end=32627 + _globals['_DEPSSYMLINKNOTAVAILABLEMSG']._serialized_start=32629 + _globals['_DEPSSYMLINKNOTAVAILABLEMSG']._serialized_end=32751 + _globals['_DISABLETRACKING']._serialized_start=32753 + _globals['_DISABLETRACKING']._serialized_end=32770 + _globals['_DISABLETRACKINGMSG']._serialized_start=32772 + 
_globals['_DISABLETRACKINGMSG']._serialized_end=32878 + _globals['_SENDINGEVENT']._serialized_start=32880 + _globals['_SENDINGEVENT']._serialized_end=32910 + _globals['_SENDINGEVENTMSG']._serialized_start=32912 + _globals['_SENDINGEVENTMSG']._serialized_end=33012 + _globals['_SENDEVENTFAILURE']._serialized_start=33014 + _globals['_SENDEVENTFAILURE']._serialized_end=33032 + _globals['_SENDEVENTFAILUREMSG']._serialized_start=33034 + _globals['_SENDEVENTFAILUREMSG']._serialized_end=33142 + _globals['_FLUSHEVENTS']._serialized_start=33144 + _globals['_FLUSHEVENTS']._serialized_end=33157 + _globals['_FLUSHEVENTSMSG']._serialized_start=33159 + _globals['_FLUSHEVENTSMSG']._serialized_end=33257 + _globals['_FLUSHEVENTSFAILURE']._serialized_start=33259 + _globals['_FLUSHEVENTSFAILURE']._serialized_end=33279 + _globals['_FLUSHEVENTSFAILUREMSG']._serialized_start=33281 + _globals['_FLUSHEVENTSFAILUREMSG']._serialized_end=33393 + _globals['_TRACKINGINITIALIZEFAILURE']._serialized_start=33395 + _globals['_TRACKINGINITIALIZEFAILURE']._serialized_end=33440 + _globals['_TRACKINGINITIALIZEFAILUREMSG']._serialized_start=33442 + _globals['_TRACKINGINITIALIZEFAILUREMSG']._serialized_end=33568 + _globals['_RUNRESULTWARNINGMESSAGE']._serialized_start=33570 + _globals['_RUNRESULTWARNINGMESSAGE']._serialized_end=33650 + _globals['_RUNRESULTWARNINGMESSAGEMSG']._serialized_start=33652 + _globals['_RUNRESULTWARNINGMESSAGEMSG']._serialized_end=33774 + _globals['_DEBUGCMDOUT']._serialized_start=33776 + _globals['_DEBUGCMDOUT']._serialized_end=33802 + _globals['_DEBUGCMDOUTMSG']._serialized_start=33804 + _globals['_DEBUGCMDOUTMSG']._serialized_end=33902 + _globals['_DEBUGCMDRESULT']._serialized_start=33904 + _globals['_DEBUGCMDRESULT']._serialized_end=33933 + _globals['_DEBUGCMDRESULTMSG']._serialized_start=33935 + _globals['_DEBUGCMDRESULTMSG']._serialized_end=34039 + _globals['_LISTCMDOUT']._serialized_start=34041 + _globals['_LISTCMDOUT']._serialized_end=34066 + 
_globals['_LISTCMDOUTMSG']._serialized_start=34068 + _globals['_LISTCMDOUTMSG']._serialized_end=34164 + _globals['_RESOURCEREPORT']._serialized_start=34167 + _globals['_RESOURCEREPORT']._serialized_end=34403 + _globals['_RESOURCEREPORTMSG']._serialized_start=34405 + _globals['_RESOURCEREPORTMSG']._serialized_end=34509 # @@protoc_insertion_point(module_scope) diff --git a/core/dbt/events/logging.py b/core/dbt/events/logging.py index 8b46908d7ef..68f2b2a0943 100644 --- a/core/dbt/events/logging.py +++ b/core/dbt/events/logging.py @@ -1,21 +1,21 @@ import os from functools import partial -from typing import List, Callable +from typing import Callable, List -from dbt_common.events.base_types import EventMsg, EventLevel +from dbt_common.events.base_types import EventLevel, EventMsg from dbt_common.events.event_manager_client import ( + add_logger_to_manager, cleanup_event_logger, get_event_manager, - add_logger_to_manager, ) from dbt_common.events.functions import ( - make_log_dir_if_missing, env_scrubber, - get_stdout_config, get_capture_stream, + get_stdout_config, + make_log_dir_if_missing, ) -from dbt_common.invocation import get_invocation_id from dbt_common.events.logger import LineFormat, LoggerConfig +from dbt_common.invocation import get_invocation_id # These are the logging events issued by the "clean" command, # where we can't count on having a log directory. 
We've removed diff --git a/core/dbt/events/types.py b/core/dbt/events/types.py index 01b46cc54bf..6a00a237b90 100644 --- a/core/dbt/events/types.py +++ b/core/dbt/events/types.py @@ -1,15 +1,20 @@ import json from dbt.constants import MAXIMUM_SEED_SIZE_NAME, PIN_PACKAGE_URL -from dbt_common.ui import error_tag, warning_tag, line_wrap_message, green, yellow, red +from dbt.events.base_types import ( + DebugLevel, + DynamicLevel, + ErrorLevel, + InfoLevel, + WarnLevel, +) from dbt_common.events.base_types import EventLevel from dbt_common.events.format import ( format_fancy_output_line, - timestamp_to_datetime_string, pluralize, + timestamp_to_datetime_string, ) -from dbt.events.base_types import WarnLevel, InfoLevel, DebugLevel, ErrorLevel, DynamicLevel - +from dbt_common.ui import error_tag, green, line_wrap_message, red, warning_tag, yellow # Event codes have prefixes which follow this table # @@ -57,12 +62,7 @@ def message(self) -> str: return f"Tracking: {self.user_state}" -class MergedFromState(DebugLevel): - def code(self) -> str: - return "A004" - - def message(self) -> str: - return f"Merged {self.num_merged} items from state (sample: {self.sample})" +# Removed A004: MergedFromState class MissingProfileTarget(InfoLevel): @@ -413,16 +413,12 @@ def message(self) -> str: return warning_tag(f"Deprecated functionality\n\n{description}") -class SpacesInModelNameDeprecation(DynamicLevel): +class SpacesInResourceNameDeprecation(DynamicLevel): def code(self) -> str: return "D014" def message(self) -> str: - version = ".v" + self.model_version if self.model_version else "" - description = ( - f"Model `{self.model_name}{version}` has spaces in its name. This is deprecated and " - "may cause errors when using dbt." 
- ) + description = f"Found spaces in the name of `{self.unique_id}`" if self.level == EventLevel.ERROR.value: description = error_tag(description) @@ -432,22 +428,39 @@ def message(self) -> str: return line_wrap_message(description) -class TotalModelNamesWithSpacesDeprecation(DynamicLevel): +class ResourceNamesWithSpacesDeprecation(WarnLevel): def code(self) -> str: return "D015" def message(self) -> str: - description = f"Spaces in model names found in {self.count_invalid_names} model(s), which is deprecated." + description = f"Spaces found in {self.count_invalid_names} resource name(s). This is deprecated, and may lead to errors when using dbt." if self.show_debug_hint: description += " Run again with `--debug` to see them all." - if self.level == EventLevel.ERROR.value: - description = error_tag(description) - elif self.level == EventLevel.WARN.value: - description = warning_tag(description) + description += " For more information: https://docs.getdbt.com/reference/global-configs/legacy-behaviors" - return line_wrap_message(description) + return line_wrap_message(warning_tag(description)) + + +class PackageMaterializationOverrideDeprecation(WarnLevel): + def code(self) -> str: + return "D016" + + def message(self) -> str: + description = f"Installed package '{self.package_name}' is overriding the built-in materialization '{self.materialization_name}'. Overrides of built-in materializations from installed packages will be deprecated in future versions of dbt. For more information: https://docs.getdbt.com/reference/global-configs/legacy-behaviors" + + return line_wrap_message(warning_tag(description)) + + +class SourceFreshnessProjectHooksNotRun(WarnLevel): + def code(self) -> str: + return "D017" + + def message(self) -> str: + description = "In a future version of dbt, the `source freshness` command will start running `on-run-start` and `on-run-end` hooks by default. 
For more information: https://docs.getdbt.com/reference/global-configs/legacy-behaviors" + + return line_wrap_message(warning_tag(description)) # ======================================================= @@ -1809,7 +1822,7 @@ def code(self) -> str: return "Z026" def message(self) -> str: - return f" compiled Code at {self.path}" + return f" compiled code at {self.path}" class CheckNodeTestFailure(InfoLevel): @@ -1969,6 +1982,7 @@ def message(self) -> str: class ListCmdOut(InfoLevel): + # No longer in use, switching to Z051 PrintEvent in dbt-common def code(self) -> str: return "Z049" diff --git a/core/dbt/exceptions.py b/core/dbt/exceptions.py index 721e65b4c27..aec2b5e3826 100644 --- a/core/dbt/exceptions.py +++ b/core/dbt/exceptions.py @@ -1,24 +1,21 @@ +import io import json import re -import io -from typing import Any, Dict, List, Mapping, Optional, Union, TYPE_CHECKING +from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Union +from dbt.node_types import REFABLE_NODE_TYPES, AccessType, NodeType +from dbt_common.constants import SECRET_ENV_PREFIX +from dbt_common.dataclass_schema import ValidationError from dbt_common.exceptions import ( - DbtRuntimeError, + CommandResultError, CompilationError, - DbtInternalError, DbtConfigError, + DbtInternalError, + DbtRuntimeError, + DbtValidationError, env_secrets, scrub_secrets, - DbtValidationError, - CommandResultError, ) -from dbt.node_types import NodeType, AccessType, REFABLE_NODE_TYPES - -from dbt_common.dataclass_schema import ValidationError - -from dbt.constants import SECRET_ENV_PREFIX - if TYPE_CHECKING: import agate @@ -113,6 +110,10 @@ class DbtProfileError(DbtConfigError): pass +class DbtExclusivePropertyUseError(DbtConfigError): + pass + + class InvalidSelectorError(DbtRuntimeError): def __init__(self, name: str) -> None: self.name = name diff --git a/core/dbt/flags.py b/core/dbt/flags.py index 7deb8966013..bcacf5d3e8f 100644 --- a/core/dbt/flags.py +++ b/core/dbt/flags.py @@ -1,25 +1,7 @@ # 
Do not import the os package because we expose this package in jinja -from os import getenv as os_getenv from argparse import Namespace -from typing import Optional from pathlib import Path - -# for setting up logger for legacy logger -def env_set_truthy(key: str) -> Optional[str]: - """Return the value if it was set to a "truthy" string value or None - otherwise. - """ - value = os_getenv(key) - if not value or value.lower() in ("0", "false", "f"): - return None - return value - - -# for setting up logger for legacy logger -ENABLE_LEGACY_LOGGER = env_set_truthy("DBT_ENABLE_LEGACY_LOGGER") - - # this roughly follows the patten of EVENT_MANAGER in dbt/common/events/functions.py # During de-globlization, we'll need to handle both similarly # Match USE_COLORS default with default in dbt.cli.params.use_colors for use in --version @@ -37,8 +19,8 @@ def get_flags(): def set_from_args(args: Namespace, project_flags): global GLOBAL_FLAGS - from dbt.cli.main import cli from dbt.cli.flags import Flags, convert_config + from dbt.cli.main import cli # we set attributes of args after initialize the flags, but project_flags # is being read in the Flags constructor, so we need to read it here and pass in diff --git a/core/dbt/graph/README.md b/core/dbt/graph/README.md index 61bfd614a18..1daa9d9fce8 100644 --- a/core/dbt/graph/README.md +++ b/core/dbt/graph/README.md @@ -1 +1,9 @@ # Graph README + +## Graph Selector Creation + +### Selector Loading +During dbt execution, the `@requires.project` decorator creates the final selector objects used in the graph. The `SelectorConfig` class loads selectors from the project configuration, while the `selector_config_from_data` function parses these selectors. + +#### Indirect Selection Default Value +In `@requires.preflight`, dbt reads CLI flags, environment variables, and the parameter's default value. It resolves these inputs based on their precedence order and stores the resolved value in global flags. 
When loading selectors, the [`selection_criteria_from_dict`](https://github.com/dbt-labs/dbt-core/blob/b316c5f18021fef3d7fd6ec255427054b7d2205e/core/dbt/graph/selector_spec.py#L111) function resolves the indirect selection value to the global flags value if not set. This ensures correct resolution of the indirect selection value. diff --git a/core/dbt/graph/__init__.py b/core/dbt/graph/__init__.py index 67d979cc0fb..f89b1edfc69 100644 --- a/core/dbt/graph/__init__.py +++ b/core/dbt/graph/__init__.py @@ -1,17 +1,11 @@ +from .cli import parse_difference, parse_from_selectors_definition # noqa: F401 +from .graph import Graph, UniqueId # noqa: F401 +from .queue import GraphQueue # noqa: F401 +from .selector import NodeSelector, ResourceTypeSelector # noqa: F401 from .selector_spec import ( # noqa: F401 - SelectionUnion, - SelectionSpec, - SelectionIntersection, - SelectionDifference, SelectionCriteria, + SelectionDifference, + SelectionIntersection, + SelectionSpec, + SelectionUnion, ) -from .selector import ( # noqa: F401 - ResourceTypeSelector, - NodeSelector, -) -from .cli import ( # noqa: F401 - parse_difference, - parse_from_selectors_definition, -) -from .queue import GraphQueue # noqa: F401 -from .graph import Graph, UniqueId # noqa: F401 diff --git a/core/dbt/graph/cli.py b/core/dbt/graph/cli.py index 2ef4e918888..412ad54caae 100644 --- a/core/dbt/graph/cli.py +++ b/core/dbt/graph/cli.py @@ -1,22 +1,21 @@ # special support for CLI argument parsing. 
# TODO: Remove as part of https://github.com/dbt-labs/dbt-core/issues/6701 -from dbt.flags import get_flags -from copy import deepcopy import itertools -from dbt.clients.yaml_helper import yaml, Loader, Dumper # noqa: F401 - -from typing import Dict, List, Optional, Tuple, Any, Union +from copy import deepcopy +from typing import Any, Dict, List, Optional, Tuple, Union +from dbt.clients.yaml_helper import Dumper, Loader, yaml # noqa: F401 from dbt.contracts.selection import SelectorDefinition, SelectorFile +from dbt.flags import get_flags from dbt_common.exceptions import DbtInternalError, DbtValidationError from .selector_spec import ( - SelectionUnion, - SelectionSpec, - SelectionIntersection, - SelectionDifference, - SelectionCriteria, IndirectSelection, + SelectionCriteria, + SelectionDifference, + SelectionIntersection, + SelectionSpec, + SelectionUnion, ) INTERSECTION_DELIMITER = "," @@ -28,7 +27,6 @@ def parse_union( components: List[str], expect_exists: bool, - indirect_selection: IndirectSelection = IndirectSelection.Eager, ) -> SelectionUnion: # turn ['a b', 'c'] -> ['a', 'b', 'c'] raw_specs = itertools.chain.from_iterable(r.split(" ") for r in components) @@ -37,7 +35,7 @@ def parse_union( # ['a', 'b', 'c,d'] -> union('a', 'b', intersection('c', 'd')) for raw_spec in raw_specs: intersection_components: List[SelectionSpec] = [ - SelectionCriteria.from_single_spec(part, indirect_selection=indirect_selection) + SelectionCriteria.from_single_spec(part) for part in raw_spec.split(INTERSECTION_DELIMITER) ] union_components.append( @@ -56,41 +54,25 @@ def parse_union( ) -def parse_union_from_default( - raw: Optional[List[str]], - default: List[str], - indirect_selection: IndirectSelection = IndirectSelection.Eager, -) -> SelectionUnion: +def parse_union_from_default(raw: Optional[List[str]], default: List[str]) -> SelectionUnion: components: List[str] expect_exists: bool if raw is None: - return parse_union( - components=default, expect_exists=False, 
indirect_selection=indirect_selection - ) + return parse_union(components=default, expect_exists=False) else: - return parse_union( - components=raw, expect_exists=True, indirect_selection=indirect_selection - ) + return parse_union(components=raw, expect_exists=True) def parse_difference( - include: Optional[List[str]], exclude: Optional[List[str]], indirect_selection: Any + include: Optional[List[str]], exclude: Optional[List[str]] ) -> SelectionDifference: if include == (): include = None - included = parse_union_from_default( - include, DEFAULT_INCLUDES, indirect_selection=IndirectSelection(indirect_selection) - ) - flags = get_flags() - excluded = parse_union_from_default( - exclude, DEFAULT_EXCLUDES, indirect_selection=IndirectSelection(flags.INDIRECT_SELECTION) - ) - return SelectionDifference( - components=[included, excluded], - indirect_selection=IndirectSelection(flags.INDIRECT_SELECTION), - ) + included = parse_union_from_default(include, DEFAULT_INCLUDES) + excluded = parse_union_from_default(exclude, DEFAULT_EXCLUDES) + return SelectionDifference(components=[included, excluded]) RawDefinition = Union[str, Dict[str, Any]] diff --git a/core/dbt/graph/graph.py b/core/dbt/graph/graph.py index 122e2f4d29a..87ba64414fb 100644 --- a/core/dbt/graph/graph.py +++ b/core/dbt/graph/graph.py @@ -1,7 +1,8 @@ -from typing import Set, Iterable, Iterator, Optional, NewType +from functools import partial from itertools import product +from typing import Iterable, Iterator, NewType, Optional, Set + import networkx as nx # type: ignore -from functools import partial from dbt_common.exceptions import DbtInternalError diff --git a/core/dbt/graph/queue.py b/core/dbt/graph/queue.py index 64b8c8438ab..18ea15ac773 100644 --- a/core/dbt/graph/queue.py +++ b/core/dbt/graph/queue.py @@ -1,19 +1,20 @@ -import networkx as nx # type: ignore import threading - from queue import PriorityQueue -from typing import Dict, Set, List, Generator, Optional +from typing import Dict, Generator, 
List, Optional, Set -from .graph import UniqueId +import networkx as nx # type: ignore + +from dbt.contracts.graph.manifest import Manifest from dbt.contracts.graph.nodes import ( - SourceDefinition, Exposure, - Metric, GraphMemberNode, + Metric, + SourceDefinition, ) -from dbt.contracts.graph.manifest import Manifest from dbt.node_types import NodeType +from .graph import UniqueId + class GraphQueue: """A fancy queue that is backed by the dependency graph. @@ -24,8 +25,15 @@ class GraphQueue: the same time, as there is an unlocked race! """ - def __init__(self, graph: nx.DiGraph, manifest: Manifest, selected: Set[UniqueId]) -> None: - self.graph = graph + def __init__( + self, + graph: nx.DiGraph, + manifest: Manifest, + selected: Set[UniqueId], + preserve_edges: bool = True, + ) -> None: + # 'create_empty_copy' returns a copy of the graph G with all of the edges removed, and leaves nodes intact. + self.graph = graph if preserve_edges else nx.classes.function.create_empty_copy(graph) self.manifest = manifest self._selected = selected # store the queue as a priority queue. 
diff --git a/core/dbt/graph/selector.py b/core/dbt/graph/selector.py index 40ef6a1c9cc..0ca3842f926 100644 --- a/core/dbt/graph/selector.py +++ b/core/dbt/graph/selector.py @@ -1,22 +1,18 @@ -from typing import Set, List, Optional, Tuple +from typing import List, Optional, Set, Tuple -from .graph import Graph, UniqueId -from .queue import GraphQueue -from .selector_methods import MethodManager -from .selector_spec import SelectionCriteria, SelectionSpec, IndirectSelection - -from dbt_common.events.functions import fire_event, warn_or_error -from dbt.events.types import SelectorReportInvalidSelector, NoNodesForSelectionCriteria -from dbt.node_types import NodeType -from dbt.exceptions import ( - DbtInternalError, - InvalidSelectorError, -) -from dbt.contracts.graph.nodes import GraphMemberNode +from dbt import selected_resources from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.graph.nodes import GraphMemberNode from dbt.contracts.state import PreviousState +from dbt.events.types import NoNodesForSelectionCriteria, SelectorReportInvalidSelector +from dbt.exceptions import DbtInternalError, InvalidSelectorError +from dbt.node_types import NodeType +from dbt_common.events.functions import fire_event, warn_or_error -from dbt import selected_resources +from .graph import Graph, UniqueId +from .queue import GraphQueue +from .selector_methods import MethodManager +from .selector_spec import IndirectSelection, SelectionCriteria, SelectionSpec def get_package_names(nodes): @@ -323,7 +319,7 @@ def get_selected(self, spec: SelectionSpec) -> Set[UniqueId]: return filtered_nodes - def get_graph_queue(self, spec: SelectionSpec) -> GraphQueue: + def get_graph_queue(self, spec: SelectionSpec, preserve_edges: bool = True) -> GraphQueue: """Returns a queue over nodes in the graph that tracks progress of dependecies. 
""" @@ -334,7 +330,7 @@ def get_graph_queue(self, spec: SelectionSpec) -> GraphQueue: # Construct a new graph using the selected_nodes new_graph = self.full_graph.get_subset_graph(selected_nodes) # should we give a way here for consumers to mutate the graph? - return GraphQueue(new_graph.graph, self.manifest, selected_nodes) + return GraphQueue(new_graph.graph, self.manifest, selected_nodes, preserve_edges) class ResourceTypeSelector(NodeSelector): diff --git a/core/dbt/graph/selector_methods.py b/core/dbt/graph/selector_methods.py index 25d3af0d493..380bad2e273 100644 --- a/core/dbt/graph/selector_methods.py +++ b/core/dbt/graph/selector_methods.py @@ -2,35 +2,41 @@ from fnmatch import fnmatch from itertools import chain from pathlib import Path -from typing import Set, List, Dict, Iterator, Tuple, Any, Union, Type, Optional, Callable - -from dbt_common.dataclass_schema import StrEnum - -from .graph import UniqueId +from typing import ( + Any, + Callable, + Dict, + Iterator, + List, + Optional, + Set, + Tuple, + Type, + Union, +) from dbt.contracts.graph.manifest import Manifest from dbt.contracts.graph.nodes import ( - SingularTestNode, Exposure, - Metric, GenericTestNode, - SourceDefinition, - ResultNode, ManifestNode, + Metric, ModelNode, - UnitTestDefinition, + ResultNode, SavedQuery, SemanticModel, + SingularTestNode, + SourceDefinition, + UnitTestDefinition, ) from dbt.contracts.graph.unparsed import UnparsedVersion from dbt.contracts.state import PreviousState -from dbt_common.exceptions import ( - DbtInternalError, - DbtRuntimeError, -) from dbt.node_types import NodeType +from dbt_common.dataclass_schema import StrEnum from dbt_common.events.contextvars import get_project_root +from dbt_common.exceptions import DbtInternalError, DbtRuntimeError +from .graph import UniqueId SELECTOR_GLOB = "*" SELECTOR_DELIMITER = ":" @@ -57,6 +63,7 @@ class MethodName(StrEnum): Version = "version" SemanticModel = "semantic_model" SavedQuery = "saved_query" + UnitTest = 
"unit_test" def is_selected_node(fqn: List[str], node_selector: str, is_versioned: bool) -> bool: @@ -419,6 +426,31 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu yield unique_id +class UnitTestSelectorMethod(SelectorMethod): + def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]: + parts = selector.split(".") + target_package = SELECTOR_GLOB + if len(parts) == 1: + target_name = parts[0] + elif len(parts) == 2: + target_package, target_name = parts + else: + msg = ( + 'Invalid unit test selector value "{}". Saved queries must be of ' + "the form ${{unit_test_name}} or " + "${{unit_test_package_name.unit_test_name}}" + ).format(selector) + raise DbtRuntimeError(msg) + + for unique_id, node in self.unit_tests(included_nodes): + if not fnmatch(node.package_name, target_package): + continue + if not fnmatch(node.name, target_name): + continue + + yield unique_id + + class PathSelectorMethod(SelectorMethod): def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]: """Yields nodes from included that match the given path.""" @@ -455,6 +487,10 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu class PackageSelectorMethod(SelectorMethod): def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]: """Yields nodes from included that have the specified package""" + # `this` is an alias for the current dbt project name + if selector == "this" and self.manifest.metadata.project_name is not None: + selector = self.manifest.metadata.project_name + for unique_id, node in self.all_nodes(included_nodes): if fnmatch(node.package_name, selector): yield unique_id @@ -691,9 +727,6 @@ def check_modified_contract(old: Optional[SelectorTarget], new: SelectorTarget) return check_modified_contract - def check_new(self, old: Optional[SelectorTarget], new: SelectorTarget) -> bool: - return old is None - def search(self, included_nodes: 
Set[UniqueId], selector: str) -> Iterator[UniqueId]: if self.previous_state is None or self.previous_state.manifest is None: raise DbtRuntimeError("Got a state selector method, but no comparison manifest") @@ -873,6 +906,7 @@ class MethodManager: MethodName.Version: VersionSelectorMethod, MethodName.SemanticModel: SemanticModelSelectorMethod, MethodName.SavedQuery: SavedQuerySelectorMethod, + MethodName.UnitTest: UnitTestSelectorMethod, } def __init__( diff --git a/core/dbt/graph/selector_spec.py b/core/dbt/graph/selector_spec.py index 89b61bf20a2..e801aef7396 100644 --- a/core/dbt/graph/selector_spec.py +++ b/core/dbt/graph/selector_spec.py @@ -2,14 +2,15 @@ import re from abc import ABCMeta, abstractmethod from dataclasses import dataclass +from typing import Any, Dict, Iterable, Iterator, List, Optional, Set, Tuple, Union + +from dbt.exceptions import InvalidSelectorError +from dbt.flags import get_flags from dbt_common.dataclass_schema import StrEnum, dbtClassMixin +from dbt_common.exceptions import DbtRuntimeError -from typing import Set, Iterator, List, Optional, Dict, Union, Any, Iterable, Tuple from .graph import UniqueId from .selector_methods import MethodName -from dbt_common.exceptions import DbtRuntimeError -from dbt.exceptions import InvalidSelectorError - RAW_SELECTOR_PATTERN = re.compile( r"\A" @@ -110,7 +111,6 @@ def selection_criteria_from_dict( cls, raw: Any, dct: Dict[str, Any], - indirect_selection: IndirectSelection = IndirectSelection.Eager, ) -> "SelectionCriteria": if "value" not in dct: raise DbtRuntimeError(f'Invalid node spec "{raw}" - no search value!') @@ -121,7 +121,7 @@ def selection_criteria_from_dict( # If defined field in selector, override CLI flag indirect_selection = IndirectSelection( - dct.get("indirect_selection", None) or indirect_selection + dct.get("indirect_selection", get_flags().INDIRECT_SELECTION) ) return cls( @@ -158,17 +158,13 @@ def dict_from_single_spec(cls, raw: str): return dct @classmethod - def 
from_single_spec( - cls, raw: str, indirect_selection: IndirectSelection = IndirectSelection.Eager - ) -> "SelectionCriteria": + def from_single_spec(cls, raw: str) -> "SelectionCriteria": result = RAW_SELECTOR_PATTERN.match(raw) if result is None: # bad spec! raise DbtRuntimeError(f'Invalid selector spec "{raw}"') - return cls.selection_criteria_from_dict( - raw, result.groupdict(), indirect_selection=indirect_selection - ) + return cls.selection_criteria_from_dict(raw, result.groupdict()) class BaseSelectionGroup(dbtClassMixin, Iterable[SelectionSpec], metaclass=ABCMeta): diff --git a/core/dbt/hooks.py b/core/dbt/hooks.py index c49d04d07b1..131b28a2449 100644 --- a/core/dbt/hooks.py +++ b/core/dbt/hooks.py @@ -1,7 +1,7 @@ -from dbt_common.dataclass_schema import StrEnum import json +from typing import Any, Dict, Union -from typing import Union, Dict, Any +from dbt_common.dataclass_schema import StrEnum class ModelHookType(StrEnum): diff --git a/core/dbt/internal_deprecations.py b/core/dbt/internal_deprecations.py index f85c65bf42b..22d389586f2 100644 --- a/core/dbt/internal_deprecations.py +++ b/core/dbt/internal_deprecations.py @@ -1,8 +1,8 @@ import functools from typing import Optional -from dbt_common.events.functions import warn_or_error from dbt.events.types import InternalDeprecation +from dbt_common.events.functions import warn_or_error def deprecated(suggested_action: str, version: str, reason: Optional[str]): diff --git a/core/dbt/logger.py b/core/dbt/logger.py deleted file mode 100644 index 33332417f2b..00000000000 --- a/core/dbt/logger.py +++ /dev/null @@ -1,524 +0,0 @@ -import dbt.flags -import dbt_common.ui - -import json -import logging -import sys -import time -import warnings -from dataclasses import dataclass -from datetime import datetime -from typing import Optional, List, ContextManager, Callable, Dict, Any, Set - -import logbook - -from dbt_common.context import get_invocation_context -from dbt_common.dataclass_schema import dbtClassMixin - 
-STDOUT_LOG_FORMAT = "{record.message}" -DEBUG_LOG_FORMAT = "{record.time:%Y-%m-%d %H:%M:%S.%f%z} ({record.thread_name}): {record.message}" - - -def get_secret_env() -> List[str]: - return get_invocation_context().env_secrets - - -ExceptionInformation = str - - -@dataclass -class LogMessage(dbtClassMixin): - timestamp: datetime - message: str - channel: str - level: int - levelname: str - thread_name: str - process: int - extra: Optional[Dict[str, Any]] = None - exc_info: Optional[ExceptionInformation] = None - - @classmethod - def from_record_formatted(cls, record: logbook.LogRecord, message: str): - extra = dict(record.extra) - log_message = LogMessage( - timestamp=record.time, - message=message, - channel=record.channel, - level=record.level, - levelname=logbook.get_level_name(record.level), - extra=extra, - thread_name=record.thread_name, - process=record.process, - exc_info=record.formatted_exception, - ) - return log_message - - -class LogMessageFormatter(logbook.StringFormatter): - def __call__(self, record, handler): - data = self.format_record(record, handler) - exc = self.format_exception(record) - if exc: - data.exc_info = exc - return data - - def format_record(self, record, handler): - message = super().format_record(record, handler) - return LogMessage.from_record_formatted(record, message) - - -class JsonFormatter(LogMessageFormatter): - def __call__(self, record, handler): - """Return a the record converted to LogMessage's JSON form""" - # utils imports exceptions which imports logger... 
- import dbt.utils - - log_message = super().__call__(record, handler) - dct = log_message.to_dict(omit_none=True) - return json.dumps(dct, cls=dbt.utils.JSONEncoder) - - -class FormatterMixin: - def __init__(self, format_string) -> None: - self._text_format_string = format_string - self.formatter_class = logbook.StringFormatter - # triggers a formatter update via logbook.StreamHandler - self.format_string = self._text_format_string - - def format_json(self): - # set our formatter to the json formatter - self.formatter_class = JsonFormatter - self.format_string = STDOUT_LOG_FORMAT - - def format_text(self): - # set our formatter to the regular stdout/stderr handler - self.formatter_class = logbook.StringFormatter - self.format_string = self._text_format_string - - def reset(self): - raise NotImplementedError("reset() not implemented in FormatterMixin subclass") - - -class OutputHandler(logbook.StreamHandler, FormatterMixin): - """Output handler. - - The `format_string` parameter only changes the default text output, not - debug mode or json. - """ - - def __init__( - self, - stream, - level=logbook.INFO, - format_string=STDOUT_LOG_FORMAT, - bubble=True, - ) -> None: - self._default_format = format_string - logbook.StreamHandler.__init__( - self, - stream=stream, - level=level, - format_string=format_string, - bubble=bubble, - ) - FormatterMixin.__init__(self, format_string) - - def set_text_format(self, format_string: str): - """Set the text format to format_string. In JSON output mode, this is - a noop. 
- """ - if self.formatter_class is logbook.StringFormatter: - # reset text format - self._text_format_string = format_string - self.format_text() - - def reset(self): - self.level = logbook.INFO - self._text_format_string = self._default_format - self.format_text() - - def should_handle(self, record): - if record.level < self.level: - return False - text_mode = self.formatter_class is logbook.StringFormatter - if text_mode and record.extra.get("json_only", False): - return False - elif not text_mode and record.extra.get("text_only", False): - return False - else: - return True - - -def _root_channel(record: logbook.LogRecord) -> str: - return record.channel.split(".")[0] - - -class Relevel(logbook.Processor): - def __init__( - self, - allowed: List[str], - min_level=logbook.WARNING, - target_level=logbook.DEBUG, - ) -> None: - self.allowed: Set[str] = set(allowed) - self.min_level = min_level - self.target_level = target_level - super().__init__() - - def process(self, record): - if _root_channel(record) in self.allowed: - return - record.extra["old_level"] = record.level - # suppress logs at/below our min level by lowering them to NOTSET - if record.level < self.min_level: - record.level = logbook.NOTSET - # if we didn't mess with it, then lower all logs above our level to - # our target level. 
- else: - record.level = self.target_level - - -class TextOnly(logbook.Processor): - def process(self, record): - record.extra["text_only"] = True - - -class TimingProcessor(logbook.Processor): - def __init__(self, timing_info: Optional[dbtClassMixin] = None) -> None: - self.timing_info = timing_info - super().__init__() - - def process(self, record): - if self.timing_info is not None: - record.extra["timing_info"] = self.timing_info.to_dict(omit_none=True) - - -class DbtProcessState(logbook.Processor): - def __init__(self, value: str) -> None: - self.value = value - super().__init__() - - def process(self, record): - overwrite = "run_state" not in record.extra or record.extra["run_state"] == "internal" - if overwrite: - record.extra["run_state"] = self.value - - -class DbtModelState(logbook.Processor): - def __init__(self, state: Dict[str, str]) -> None: - self.state = state - super().__init__() - - def process(self, record): - record.extra.update(self.state) - - -class DbtStatusMessage(logbook.Processor): - def process(self, record): - record.extra["is_status_message"] = True - - -class UniqueID(logbook.Processor): - def __init__(self, unique_id: str) -> None: - self.unique_id = unique_id - super().__init__() - - def process(self, record): - record.extra["unique_id"] = self.unique_id - - -class NodeCount(logbook.Processor): - def __init__(self, node_count: int) -> None: - self.node_count = node_count - super().__init__() - - def process(self, record): - record.extra["node_count"] = self.node_count - - -class NodeMetadata(logbook.Processor): - def __init__(self, node, index) -> None: - self.node = node - self.index = index - super().__init__() - - def mapping_keys(self): - return [] - - def process_keys(self, record): - for attr, key in self.mapping_keys(): - value = getattr(self.node, attr, None) - if value is not None: - record.extra[key] = value - - def process(self, record): - self.process_keys(record) - record.extra["node_index"] = self.index - - -class 
ModelMetadata(NodeMetadata): - def mapping_keys(self): - return [ - ("alias", "node_alias"), - ("schema", "node_schema"), - ("database", "node_database"), - ("original_file_path", "node_path"), - ("name", "node_name"), - ("resource_type", "resource_type"), - ("depends_on_nodes", "depends_on"), - ] - - def process_config(self, record): - if hasattr(self.node, "config"): - materialized = getattr(self.node.config, "materialized", None) - if materialized is not None: - record.extra["node_materialized"] = materialized - - def process(self, record): - super().process(record) - self.process_config(record) - - -class HookMetadata(NodeMetadata): - def mapping_keys(self): - return [ - ("name", "node_name"), - ("resource_type", "resource_type"), - ] - - -class TimestampNamed(logbook.Processor): - def __init__(self, name: str) -> None: - self.name = name - super().__init__() - - def process(self, record): - super().process(record) - record.extra[self.name] = datetime.utcnow().isoformat() - - -class ScrubSecrets(logbook.Processor): - def process(self, record): - for secret in get_secret_env(): - record.message = str(record.message).replace(secret, "*****") - - -logger = logbook.Logger("dbt") -# provide this for the cache, disabled by default -CACHE_LOGGER = logbook.Logger("dbt.cache") -CACHE_LOGGER.disable() - -warnings.filterwarnings("ignore", category=ResourceWarning, message="unclosed.*") - -initialized = False - - -def make_log_dir_if_missing(log_dir): - import dbt_common.clients.system - - dbt_common.clients.system.make_directory(log_dir) - - -class DebugWarnings(logbook.compat.redirected_warnings): - """Log warnings, except send them to 'debug' instead of 'warning' level.""" - - def make_record(self, message, exception, filename, lineno): - rv = super().make_record(message, exception, filename, lineno) - rv.level = logbook.DEBUG - rv.extra["from_warnings"] = True - return rv - - -# push Python warnings to debug level logs. This will suppress all import-time -# warnings. 
-DebugWarnings().__enter__() - - -class LogManager(logbook.NestedSetup): - def __init__(self, stdout=sys.stdout, stderr=sys.stderr) -> None: - self.stdout = stdout - self.stderr = stderr - self._null_handler = logbook.NullHandler() - self._output_handler = OutputHandler(self.stdout) - self._relevel_processor = Relevel(allowed=["dbt", "werkzeug"]) - self._state_processor = DbtProcessState("internal") - self._scrub_processor = ScrubSecrets() - # keep track of whether we've already entered to decide if we should - # be actually pushing. This allows us to log in main() and also - # support entering dbt execution via handle_and_check. - self._stack_depth = 0 - super().__init__( - [ - self._null_handler, - self._output_handler, - self._relevel_processor, - self._state_processor, - self._scrub_processor, - ] - ) - - def push_application(self): - self._stack_depth += 1 - if self._stack_depth == 1: - super().push_application() - - def pop_application(self): - self._stack_depth -= 1 - if self._stack_depth == 0: - super().pop_application() - - def disable(self): - self.add_handler(logbook.NullHandler()) - - def add_handler(self, handler): - """add an handler to the log manager that runs before the file handler.""" - self.objects.append(handler) - - def set_path(self, _): - """No-op that allows dbt-rpc to not break. See GH #7661""" - pass - - @property - def initialized(self): - """Dummy return value for dbt-rpc. 
See GH#7661""" - return True - - # this is used by `dbt ls` to allow piping stdout to jq, etc - def stderr_console(self): - """Output to stderr at WARNING level instead of stdout""" - self._output_handler.stream = self.stderr - self._output_handler.level = logbook.WARNING - - def stdout_console(self): - """enable stdout and disable stderr""" - self._output_handler.stream = self.stdout - self._output_handler.level = logbook.INFO - - def set_debug(self): - self._output_handler.set_text_format(DEBUG_LOG_FORMAT) - self._output_handler.level = logbook.DEBUG - - def format_json(self): - for handler in self.objects: - if isinstance(handler, FormatterMixin): - handler.format_json() - - def format_text(self): - for handler in self.objects: - if isinstance(handler, FormatterMixin): - handler.format_text() - - def reset_handlers(self): - """Reset the handlers to their defaults. This is nice in testing!""" - self.stdout_console() - for handler in self.objects: - if isinstance(handler, FormatterMixin): - handler.reset() - - def set_output_stream(self, stream, error=None): - if error is None: - error = stream - - if self._output_handler.stream is self.stdout: - self._output_handler.stream = stream - elif self._output_handler.stream is self.stderr: - self._output_handler.stream = error - - self.stdout = stream - self.stderr = error - - -log_manager = LogManager() - - -def log_cache_events(flag): - """Set the cache logger to propagate its messages based on the given flag.""" - # the flag is True if we should log, and False if we shouldn't, so disabled - # is the inverse. 
- CACHE_LOGGER.disabled = not flag - - -if not dbt.flags.ENABLE_LEGACY_LOGGER: - logger.disable() -GLOBAL_LOGGER = logger - - -class LogMessageHandler(logbook.Handler): - formatter_class = LogMessageFormatter - - def format_logmessage(self, record): - """Format a LogRecord into a LogMessage""" - message = self.format(record) - return LogMessage.from_record_formatted(record, message) - - -class ListLogHandler(LogMessageHandler): - def __init__( - self, - level: int = logbook.NOTSET, - filter: Optional[Callable] = None, - bubble: bool = False, - lst: Optional[List[LogMessage]] = None, - ) -> None: - super().__init__(level, filter, bubble) - if lst is None: - lst = [] - self.records: List[LogMessage] = lst - - def should_handle(self, record): - """Only ever emit dbt-sourced log messages to the ListHandler.""" - if _root_channel(record) != "dbt": - return False - return super().should_handle(record) - - def emit(self, record: logbook.LogRecord): - as_dict = self.format_logmessage(record) - self.records.append(as_dict) - - -def _env_log_level(var_name: str) -> int: - # convert debugging environment variable name to a log level - if dbt.flags.env_set_truthy(var_name): - return logging.DEBUG - else: - return logging.ERROR - - -LOG_LEVEL_GOOGLE = _env_log_level("DBT_GOOGLE_DEBUG_LOGGING") -LOG_LEVEL_SNOWFLAKE = _env_log_level("DBT_SNOWFLAKE_CONNECTOR_DEBUG_LOGGING") -LOG_LEVEL_BOTOCORE = _env_log_level("DBT_BOTOCORE_DEBUG_LOGGING") -LOG_LEVEL_HTTP = _env_log_level("DBT_HTTP_DEBUG_LOGGING") -LOG_LEVEL_WERKZEUG = _env_log_level("DBT_WERKZEUG_DEBUG_LOGGING") - -logging.getLogger("botocore").setLevel(LOG_LEVEL_BOTOCORE) -logging.getLogger("requests").setLevel(LOG_LEVEL_HTTP) -logging.getLogger("urllib3").setLevel(LOG_LEVEL_HTTP) -logging.getLogger("google").setLevel(LOG_LEVEL_GOOGLE) -logging.getLogger("snowflake.connector").setLevel(LOG_LEVEL_SNOWFLAKE) - -logging.getLogger("parsedatetime").setLevel(logging.ERROR) -logging.getLogger("werkzeug").setLevel(LOG_LEVEL_WERKZEUG) - 
- -def list_handler( - lst: Optional[List[LogMessage]], - level=logbook.NOTSET, -) -> ContextManager: - """Return a context manager that temporarily attaches a list to the logger.""" - return ListLogHandler(lst=lst, level=level, bubble=True) - - -def get_timestamp(): - return time.strftime("%H:%M:%S") - - -def timestamped_line(msg: str) -> str: - return "{} | {}".format(get_timestamp(), msg) - - -def print_timestamped_line(msg: str, use_color: Optional[str] = None): - if use_color is not None: - msg = dbt_common.ui.color(msg, use_color) - - GLOBAL_LOGGER.info(timestamped_line(msg)) diff --git a/core/dbt/mp_context.py b/core/dbt/mp_context.py index 19cefd99511..0a39fd4da96 100644 --- a/core/dbt/mp_context.py +++ b/core/dbt/mp_context.py @@ -1,7 +1,6 @@ from multiprocessing import get_context from multiprocessing.context import SpawnContext - _MP_CONTEXT = get_context("spawn") diff --git a/core/dbt/node_types.py b/core/dbt/node_types.py index 229c97352f3..52503f46ba2 100644 --- a/core/dbt/node_types.py +++ b/core/dbt/node_types.py @@ -1,7 +1,12 @@ from typing import List # preserving import path during dbt/artifacts refactor -from dbt.artifacts.resources.types import NodeType, AccessType, RunHookType, ModelLanguage # noqa +from dbt.artifacts.resources.types import ( # noqa + AccessType, + ModelLanguage, + NodeType, + RunHookType, +) EXECUTABLE_NODE_TYPES: List["NodeType"] = [ NodeType.Model, diff --git a/core/dbt/parser/__init__.py b/core/dbt/parser/__init__.py index ee0490ecb61..04f345fe107 100644 --- a/core/dbt/parser/__init__.py +++ b/core/dbt/parser/__init__.py @@ -1,24 +1,23 @@ -from .analysis import AnalysisParser # noqa -from .base import Parser, ConfiguredParser # noqa -from .singular_test import SingularTestParser # noqa -from .generic_test import GenericTestParser # noqa -from .docs import DocumentationParser # noqa -from .hooks import HookParser # noqa -from .macros import MacroParser # noqa -from .models import ModelParser # noqa -from .schemas import 
SchemaParser # noqa -from .seeds import SeedParser # noqa -from .snapshots import SnapshotParser # noqa - from . import ( # noqa analysis, base, - generic_test, - singular_test, docs, + generic_test, hooks, macros, models, schemas, + singular_test, snapshots, ) +from .analysis import AnalysisParser # noqa +from .base import ConfiguredParser, Parser # noqa +from .docs import DocumentationParser # noqa +from .generic_test import GenericTestParser # noqa +from .hooks import HookParser # noqa +from .macros import MacroParser # noqa +from .models import ModelParser # noqa +from .schemas import SchemaParser # noqa +from .seeds import SeedParser # noqa +from .singular_test import SingularTestParser # noqa +from .snapshots import SnapshotParser # noqa diff --git a/core/dbt/parser/base.py b/core/dbt/parser/base.py index e345a74183c..aabc29f5760 100644 --- a/core/dbt/parser/base.py +++ b/core/dbt/parser/base.py @@ -1,33 +1,30 @@ import abc import itertools import os -from typing import List, Dict, Any, Generic, Optional, TypeVar +from typing import Any, Dict, Generic, List, Optional, TypeVar -from dbt_common.dataclass_schema import ValidationError - -from dbt import utils -from dbt.clients.jinja import MacroGenerator -from dbt.context.providers import ( - generate_parser_model_context, - generate_generate_name_macro_context, -) +from dbt import hooks, utils from dbt.adapters.factory import get_adapter # noqa: F401 from dbt.artifacts.resources import Contract -from dbt.clients.jinja import get_rendered +from dbt.clients.jinja import MacroGenerator, get_rendered from dbt.config import Project, RuntimeConfig from dbt.context.context_config import ContextConfig +from dbt.context.providers import ( + generate_generate_name_macro_context, + generate_parser_model_context, +) from dbt.contracts.graph.manifest import Manifest from dbt.contracts.graph.nodes import BaseNode, ManifestNode from dbt.contracts.graph.unparsed import Docs, UnparsedNode from dbt.exceptions import ( - 
DbtInternalError, ConfigUpdateError, + DbtInternalError, DictParseError, InvalidAccessTypeError, ) -from dbt import hooks -from dbt.node_types import NodeType, ModelLanguage, AccessType +from dbt.node_types import AccessType, ModelLanguage, NodeType from dbt.parser.search import FileBlock +from dbt_common.dataclass_schema import ValidationError # internally, the parser may store a less-restrictive type that will be # transformed into the final type. But it will have to be derived from diff --git a/core/dbt/parser/common.py b/core/dbt/parser/common.py index a4f990147d2..5e5807a0335 100644 --- a/core/dbt/parser/common.py +++ b/core/dbt/parser/common.py @@ -1,21 +1,23 @@ +from dataclasses import dataclass +from typing import Any, Dict, Generic, List, Optional, TypeVar, Union + from dbt.artifacts.resources import ColumnInfo, NodeVersion -from dbt_common.contracts.constraints import ColumnLevelConstraint, ConstraintType +from dbt.contracts.graph.nodes import UnpatchedSourceDefinition from dbt.contracts.graph.unparsed import ( + HasColumnDocs, HasColumnProps, - UnparsedColumn, - UnparsedNodeUpdate, - UnparsedMacroUpdate, + HasColumnTests, UnparsedAnalysisUpdate, + UnparsedColumn, UnparsedExposure, + UnparsedMacroUpdate, UnparsedModelUpdate, + UnparsedNodeUpdate, ) -from dbt.contracts.graph.unparsed import HasColumnTests, HasColumnDocs -from dbt.contracts.graph.nodes import UnpatchedSourceDefinition +from dbt.exceptions import ParsingError from dbt.parser.search import FileBlock -from typing import List, Dict, Any, TypeVar, Generic, Union, Optional -from dataclasses import dataclass +from dbt_common.contracts.constraints import ColumnLevelConstraint, ConstraintType from dbt_common.exceptions import DbtInternalError -from dbt.exceptions import ParsingError def trimmed(inp: str) -> str: diff --git a/core/dbt/parser/docs.py b/core/dbt/parser/docs.py index edc7f83acfc..1bd7af5b55c 100644 --- a/core/dbt/parser/docs.py +++ b/core/dbt/parser/docs.py @@ -1,14 +1,12 @@ -from 
typing import Iterable, Optional - import re +from typing import Iterable, Optional from dbt.clients.jinja import get_rendered from dbt.contracts.files import SourceFile from dbt.contracts.graph.nodes import Documentation from dbt.node_types import NodeType from dbt.parser.base import Parser -from dbt.parser.search import BlockContents, FileBlock, BlockSearcher - +from dbt.parser.search import BlockContents, BlockSearcher, FileBlock SHOULD_PARSE_RE = re.compile(r"{[{%]") diff --git a/core/dbt/parser/fixtures.py b/core/dbt/parser/fixtures.py index b3002725674..b1469d0477d 100644 --- a/core/dbt/parser/fixtures.py +++ b/core/dbt/parser/fixtures.py @@ -1,6 +1,6 @@ -from typing import Optional, Dict, List, Any -from io import StringIO import csv +from io import StringIO +from typing import Any, Dict, List, Optional from dbt.contracts.files import FixtureSourceFile from dbt.contracts.graph.nodes import UnitTestFileFixture diff --git a/core/dbt/parser/generic_test.py b/core/dbt/parser/generic_test.py index 61dcc790163..3c10976fe30 100644 --- a/core/dbt/parser/generic_test.py +++ b/core/dbt/parser/generic_test.py @@ -2,14 +2,14 @@ import jinja2 -from dbt.exceptions import ParsingError -from dbt_common.clients import jinja +from dbt.contracts.files import SourceFile from dbt.contracts.graph.nodes import GenericTestNode, Macro from dbt.contracts.graph.unparsed import UnparsedMacro -from dbt.contracts.files import SourceFile +from dbt.exceptions import ParsingError from dbt.node_types import NodeType from dbt.parser.base import BaseParser from dbt.parser.search import FileBlock +from dbt_common.clients import jinja from dbt_common.utils import MACRO_PREFIX diff --git a/core/dbt/parser/generic_test_builders.py b/core/dbt/parser/generic_test_builders.py index bc9d75fc3c5..8a4864be82e 100644 --- a/core/dbt/parser/generic_test_builders.py +++ b/core/dbt/parser/generic_test_builders.py @@ -1,21 +1,11 @@ import re from copy import deepcopy -from typing import ( - Generic, - Dict, - 
Any, - Tuple, - Optional, - List, -) +from typing import Any, Dict, Generic, List, Optional, Tuple from dbt.artifacts.resources import NodeVersion -from dbt.clients.jinja import get_rendered, GENERIC_TEST_KWARGS_NAME +from dbt.clients.jinja import GENERIC_TEST_KWARGS_NAME, get_rendered from dbt.contracts.graph.nodes import UnpatchedSourceDefinition -from dbt.contracts.graph.unparsed import ( - UnparsedNodeUpdate, - UnparsedModelUpdate, -) +from dbt.contracts.graph.unparsed import UnparsedModelUpdate, UnparsedNodeUpdate from dbt.exceptions import ( CustomMacroPopulatingConfigValueError, SameKeyNestedError, @@ -24,13 +14,13 @@ TestArgIncludesModelError, TestArgsNotDictError, TestDefinitionDictLengthError, - TestTypeError, TestNameNotStringError, + TestTypeError, UnexpectedTestNamePatternError, ) -from dbt_common.exceptions.macros import UndefinedMacroError from dbt.parser.common import Testable from dbt.utils import md5 +from dbt_common.exceptions.macros import UndefinedMacroError def synthesize_generic_test_names( diff --git a/core/dbt/parser/hooks.py b/core/dbt/parser/hooks.py index c09df3f8fc7..f8efd6a5a75 100644 --- a/core/dbt/parser/hooks.py +++ b/core/dbt/parser/hooks.py @@ -1,14 +1,14 @@ from dataclasses import dataclass -from typing import Iterable, Iterator, Union, List, Tuple +from typing import Iterable, Iterator, List, Tuple, Union from dbt.context.context_config import ContextConfig from dbt.contracts.files import FilePath from dbt.contracts.graph.nodes import HookNode -from dbt_common.exceptions import DbtInternalError from dbt.node_types import NodeType, RunHookType from dbt.parser.base import SimpleParser from dbt.parser.search import FileBlock from dbt.utils import get_pseudo_hook_path +from dbt_common.exceptions import DbtInternalError @dataclass diff --git a/core/dbt/parser/macros.py b/core/dbt/parser/macros.py index 23a9bf53060..f7eaef62b69 100644 --- a/core/dbt/parser/macros.py +++ b/core/dbt/parser/macros.py @@ -2,15 +2,15 @@ import jinja2 -from 
dbt_common.clients import jinja from dbt.clients.jinja import get_supported_languages -from dbt.contracts.graph.unparsed import UnparsedMacro -from dbt.contracts.graph.nodes import Macro from dbt.contracts.files import FilePath, SourceFile +from dbt.contracts.graph.nodes import Macro +from dbt.contracts.graph.unparsed import UnparsedMacro from dbt.exceptions import ParsingError from dbt.node_types import NodeType from dbt.parser.base import BaseParser from dbt.parser.search import FileBlock, filesystem_search +from dbt_common.clients import jinja from dbt_common.utils import MACRO_PREFIX diff --git a/core/dbt/parser/manifest.py b/core/dbt/parser/manifest.py index 5e406d81d03..62741b2da6d 100644 --- a/core/dbt/parser/manifest.py +++ b/core/dbt/parser/manifest.py @@ -1,148 +1,125 @@ -from copy import deepcopy -from dataclasses import dataclass -from dataclasses import field import datetime +import json import os +import pprint +import time import traceback -from typing import ( - Dict, - Optional, - Mapping, - Callable, - Any, - List, - Type, - Union, - Tuple, - Set, -) +from copy import deepcopy +from dataclasses import dataclass, field from itertools import chain -import time +from typing import Any, Callable, Dict, List, Mapping, Optional, Set, Tuple, Type, Union -from dbt.context.query_header import generate_query_header_context -from dbt.contracts.graph.semantic_manifest import SemanticManifest -from dbt_common.events.base_types import EventLevel -from dbt_common.exceptions.base import DbtValidationError -import dbt_common.utils -import json -import pprint -from dbt.mp_context import get_mp_context import msgpack +from dbt_semantic_interfaces.enum_extension import assert_values_exhausted +from dbt_semantic_interfaces.type_enums import MetricType +import dbt.deprecations import dbt.exceptions import dbt.tracking import dbt.utils -from dbt.flags import get_flags - +import dbt_common.utils +from dbt import plugins from dbt.adapters.factory import ( get_adapter, - 
get_relation_class_by_name, get_adapter_package_names, + get_relation_class_by_name, register_adapter, ) +from dbt.artifacts.resources import FileHash, NodeRelation, NodeVersion +from dbt.artifacts.schemas.base import Writable +from dbt.clients.jinja import MacroStack, get_rendered +from dbt.clients.jinja_static import statically_extract_macro_calls +from dbt.config import Project, RuntimeConfig from dbt.constants import ( MANIFEST_FILE_NAME, PARTIAL_PARSE_FILE_NAME, SEMANTIC_MANIFEST_FILE_NAME, - SECRET_ENV_PREFIX, ) -from dbt_common.helper_types import PathSet -from dbt_common.events.functions import fire_event, get_invocation_id, warn_or_error -from dbt_common.events.types import ( - Note, -) -from dbt.events.types import ( - PartialParsingErrorProcessingFile, - PartialParsingError, - ParsePerfInfoPath, - PartialParsingSkipParsing, - UnableToPartialParse, - PartialParsingNotEnabled, - ParsedFileLoadFailed, - InvalidDisabledTargetInTestNode, - NodeNotFoundOrDisabled, - StateCheckVarsHash, - DeprecatedModel, - DeprecatedReference, - SpacesInModelNameDeprecation, - TotalModelNamesWithSpacesDeprecation, - UpcomingReferenceDeprecation, -) -from dbt.logger import DbtProcessState -from dbt.node_types import NodeType, AccessType -from dbt.clients.jinja import get_rendered, MacroStack -from dbt.clients.jinja_static import statically_extract_macro_calls -from dbt_common.clients.system import ( - make_directory, - path_exists, - read_json, - write_file, -) -from dbt.config import Project, RuntimeConfig +from dbt.context.configured import generate_macro_context from dbt.context.docs import generate_runtime_docs_context from dbt.context.macro_resolver import MacroResolver, TestMacroNamespace -from dbt.context.configured import generate_macro_context from dbt.context.providers import ParseProvider, generate_runtime_macro_context +from dbt.context.query_header import generate_query_header_context from dbt.contracts.files import ParseFileType, SchemaSourceFile -from 
dbt.parser.read_files import ( - ReadFilesFromFileSystem, - load_source_file, - FileDiff, - ReadFilesFromDiff, - ReadFiles, -) -from dbt.parser.partial import PartialParsing, special_override_macros from dbt.contracts.graph.manifest import ( - Manifest, Disabled, MacroManifest, + Manifest, ManifestStateCheck, ParsingInfo, ) from dbt.contracts.graph.nodes import ( - SourceDefinition, - Macro, Exposure, + GenericTestNode, + Macro, + ManifestNode, Metric, + ModelNode, + ResultNode, SavedQuery, SeedNode, SemanticModel, - ManifestNode, - ResultNode, - ModelNode, + SourceDefinition, +) +from dbt.contracts.graph.semantic_manifest import SemanticManifest +from dbt.events.types import ( + DeprecatedModel, + DeprecatedReference, + InvalidDisabledTargetInTestNode, + NodeNotFoundOrDisabled, + ParsedFileLoadFailed, + ParsePerfInfoPath, + PartialParsingError, + PartialParsingErrorProcessingFile, + PartialParsingNotEnabled, + PartialParsingSkipParsing, + SpacesInResourceNameDeprecation, + StateCheckVarsHash, + UnableToPartialParse, + UpcomingReferenceDeprecation, ) -from dbt.artifacts.resources import NodeRelation, NodeVersion, FileHash -from dbt.artifacts.schemas.base import Writable from dbt.exceptions import ( - TargetNotFoundError, AmbiguousAliasError, InvalidAccessTypeError, + TargetNotFoundError, scrub_secrets, ) -from dbt.parser.base import Parser +from dbt.flags import get_flags +from dbt.mp_context import get_mp_context +from dbt.node_types import AccessType, NodeType from dbt.parser.analysis import AnalysisParser -from dbt.parser.generic_test import GenericTestParser -from dbt.parser.singular_test import SingularTestParser +from dbt.parser.base import Parser from dbt.parser.docs import DocumentationParser from dbt.parser.fixtures import FixtureParser +from dbt.parser.generic_test import GenericTestParser from dbt.parser.hooks import HookParser from dbt.parser.macros import MacroParser from dbt.parser.models import ModelParser +from dbt.parser.partial import 
PartialParsing, special_override_macros +from dbt.parser.read_files import ( + FileDiff, + ReadFiles, + ReadFilesFromDiff, + ReadFilesFromFileSystem, + load_source_file, +) from dbt.parser.schemas import SchemaParser from dbt.parser.search import FileBlock from dbt.parser.seeds import SeedParser +from dbt.parser.singular_test import SingularTestParser from dbt.parser.snapshots import SnapshotParser from dbt.parser.sources import SourcePatcher from dbt.parser.unit_tests import process_models_for_unit_test from dbt.version import __version__ - +from dbt_common.clients.system import make_directory, path_exists, read_json, write_file +from dbt_common.constants import SECRET_ENV_PREFIX from dbt_common.dataclass_schema import StrEnum, dbtClassMixin -from dbt import plugins - -from dbt_semantic_interfaces.enum_extension import assert_values_exhausted -from dbt_semantic_interfaces.type_enums import MetricType +from dbt_common.events.base_types import EventLevel +from dbt_common.events.functions import fire_event, get_invocation_id, warn_or_error +from dbt_common.events.types import Note +from dbt_common.exceptions.base import DbtValidationError +from dbt_common.helper_types import PathSet -PARSING_STATE = DbtProcessState("parsing") PERF_INFO_FILE_NAME = "perf_info.json" @@ -233,7 +210,7 @@ class ManifestLoaderInfo(dbtClassMixin, Writable): projects: List[ProjectLoaderInfo] = field(default_factory=list) _project_index: Dict[str, ProjectLoaderInfo] = field(default_factory=dict) - def __post_serialize__(self, dct): + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): del dct["_project_index"] return dct @@ -315,33 +292,32 @@ def get_full_manifest( file_diff_dct = read_json(file_diff_path) file_diff = FileDiff.from_dict(file_diff_dct) - with PARSING_STATE: # set up logbook.Processor for parsing - # Start performance counting - start_load_all = time.perf_counter() + # Start performance counting + start_load_all = time.perf_counter() - projects = 
config.load_dependencies() - loader = cls( - config, - projects, - macro_hook=macro_hook, - file_diff=file_diff, - ) + projects = config.load_dependencies() + loader = cls( + config, + projects, + macro_hook=macro_hook, + file_diff=file_diff, + ) - manifest = loader.load() + manifest = loader.load() - _check_manifest(manifest, config) - manifest.build_flat_graph() + _check_manifest(manifest, config) + manifest.build_flat_graph() - # This needs to happen after loading from a partial parse, - # so that the adapter has the query headers from the macro_hook. - loader.save_macros_to_adapter(adapter) + # This needs to happen after loading from a partial parse, + # so that the adapter has the query headers from the macro_hook. + loader.save_macros_to_adapter(adapter) - # Save performance info - loader._perf_info.load_all_elapsed = time.perf_counter() - start_load_all - loader.track_project_load() + # Save performance info + loader._perf_info.load_all_elapsed = time.perf_counter() - start_load_all + loader.track_project_load() - if write_perf_info: - loader.write_perf_info(config.project_target_path) + if write_perf_info: + loader.write_perf_info(config.project_target_path) return manifest @@ -488,6 +464,7 @@ def load(self) -> Manifest: self.process_docs(self.root_project) self.process_metrics(self.root_project) self.process_saved_queries(self.root_project) + self.process_model_inferred_primary_keys() self.check_valid_group_config() self.check_valid_access_property() @@ -525,7 +502,7 @@ def load(self) -> Manifest: self.write_manifest_for_partial_parse() self.check_for_model_deprecations() - self.check_for_spaces_in_model_names() + self.check_for_spaces_in_resource_names() return self.manifest @@ -627,46 +604,43 @@ def check_for_model_deprecations(self): ) ) - def check_for_spaces_in_model_names(self): - """Validates that model names do not contain spaces + def check_for_spaces_in_resource_names(self): + """Validates that resource names do not contain spaces If `DEBUG` flag 
is `False`, logs only first bad model name If `DEBUG` flag is `True`, logs every bad model name - If `ALLOW_SPACES_IN_MODEL_NAMES` is `False`, logs are `ERROR` level and an exception is raised if any names are bad - If `ALLOW_SPACES_IN_MODEL_NAMES` is `True`, logs are `WARN` level + If `REQUIRE_RESOURCE_NAMES_WITHOUT_SPACES` is `True`, logs are `ERROR` level and an exception is raised if any names are bad + If `REQUIRE_RESOURCE_NAMES_WITHOUT_SPACES` is `False`, logs are `WARN` level """ - improper_model_names = 0 + improper_resource_names = 0 level = ( - EventLevel.WARN - if self.root_project.args.ALLOW_SPACES_IN_MODEL_NAMES - else EventLevel.ERROR + EventLevel.ERROR + if self.root_project.args.REQUIRE_RESOURCE_NAMES_WITHOUT_SPACES + else EventLevel.WARN ) for node in self.manifest.nodes.values(): - if isinstance(node, ModelNode) and " " in node.name: - if improper_model_names == 0 or self.root_project.args.DEBUG: + if " " in node.name: + if improper_resource_names == 0 or self.root_project.args.DEBUG: fire_event( - SpacesInModelNameDeprecation( - model_name=node.name, - model_version=version_to_str(node.version), + SpacesInResourceNameDeprecation( + unique_id=node.unique_id, level=level.value, ), level=level, ) - improper_model_names += 1 - - if improper_model_names > 0: - fire_event( - TotalModelNamesWithSpacesDeprecation( - count_invalid_names=improper_model_names, - show_debug_hint=(not self.root_project.args.DEBUG), - level=level.value, - ), - level=level, - ) - - if level == EventLevel.ERROR: - raise DbtValidationError("Model names cannot contain spaces") + improper_resource_names += 1 + + if improper_resource_names > 0: + if level == EventLevel.WARN: + flags = get_flags() + dbt.deprecations.warn( + "resource-names-with-spaces", + count_invalid_names=improper_resource_names, + show_debug_hint=(not flags.DEBUG), + ) + else: # ERROR level + raise DbtValidationError("Resource names cannot contain spaces") def load_and_parse_macros(self, project_parser_files): 
for project in self.all_projects.values(): @@ -1091,18 +1065,16 @@ def load_macros( macro_hook: Callable[[Manifest], Any], base_macros_only=False, ) -> Manifest: - with PARSING_STATE: - # base_only/base_macros_only: for testing only, - # allows loading macros without running 'dbt deps' first - projects = root_config.load_dependencies(base_only=base_macros_only) + # base_only/base_macros_only: for testing only, + # allows loading macros without running 'dbt deps' first + projects = root_config.load_dependencies(base_only=base_macros_only) - # This creates a loader object, including result, - # and then throws it away, returning only the - # manifest - loader = cls(root_config, projects, macro_hook) - macro_manifest = loader.create_macro_manifest() + # This creates a loader object, including result, + # and then throws it away, returning only the + # manifest + loader = cls(root_config, projects, macro_hook) - return macro_manifest + return loader.create_macro_manifest() # Create tracking event for saving performance info def track_project_load(self): @@ -1174,6 +1146,15 @@ def process_saved_queries(self, config: RuntimeConfig): # 2. process `group_by` of SavedQuery for `depends_on`` _process_metrics_for_node(self.manifest, current_project, saved_query) + def process_model_inferred_primary_keys(self): + """Processes Model nodes to populate their `primary_key`.""" + for node in self.manifest.nodes.values(): + if not isinstance(node, ModelNode): + continue + generic_tests = self._get_generic_tests_for_model(node) + primary_key = node.infer_primary_key(generic_tests) + node.primary_key = sorted(primary_key) + def update_semantic_model(self, semantic_model) -> None: # This has to be done at the end of parsing because the referenced model # might have alias/schema/database fields that are updated by yaml config. 
@@ -1369,6 +1350,24 @@ def write_perf_info(self, target_path: str): write_file(path, json.dumps(self._perf_info, cls=dbt.utils.JSONEncoder, indent=4)) fire_event(ParsePerfInfoPath(path=path)) + def _get_generic_tests_for_model( + self, + model: ModelNode, + ) -> List[GenericTestNode]: + """Return a list of generic tests that are attached to the given model, including disabled tests""" + tests = [] + for _, node in self.manifest.nodes.items(): + if isinstance(node, GenericTestNode) and node.attached_node == model.unique_id: + tests.append(node) + for _, nodes in self.manifest.disabled.items(): + for disabled_node in nodes: + if ( + isinstance(disabled_node, GenericTestNode) + and disabled_node.attached_node == model.unique_id + ): + tests.append(disabled_node) + return tests + def invalid_target_fail_unless_test( node, @@ -1889,7 +1888,12 @@ def write_manifest(manifest: Manifest, target_path: str, which: Optional[str] = write_semantic_manifest(manifest=manifest, target_path=target_path) -def parse_manifest(runtime_config, write_perf_info, write, write_json): +def parse_manifest( + runtime_config: RuntimeConfig, + write_perf_info: bool, + write: bool, + write_json: bool, +) -> Manifest: register_adapter(runtime_config, get_mp_context()) adapter = get_adapter(runtime_config) adapter.set_macro_context_generator(generate_runtime_macro_context) @@ -1898,6 +1902,7 @@ def parse_manifest(runtime_config, write_perf_info, write, write_json): write_perf_info=write_perf_info, ) + # If we should (over)write the manifest in the target path, do that now if write and write_json: write_manifest(manifest, runtime_config.project_target_path) pm = plugins.get_plugin_manager(runtime_config.project_name) diff --git a/core/dbt/parser/models.py b/core/dbt/parser/models.py index 20ad0ada34d..dc3ff334bf4 100644 --- a/core/dbt/parser/models.py +++ b/core/dbt/parser/models.py @@ -1,30 +1,30 @@ +# New for Python models :p +import ast +import random from copy import deepcopy -from 
dbt.artifacts.resources import RefArgs -from dbt.context.context_config import ContextConfig -from dbt.contracts.graph.nodes import ModelNode -from dbt.flags import get_flags -from dbt.node_types import NodeType, ModelLanguage -from dbt.parser.base import SimpleSQLParser -from dbt.parser.search import FileBlock -from dbt.clients.jinja import get_rendered -import dbt.tracking as tracking -from dbt import utils -from dbt_extractor import ExtractionError, py_extract_from_source # type: ignore from functools import reduce from itertools import chain -import random from typing import Any, Dict, Iterator, List, Optional, Tuple, Union -# New for Python models :p -import ast -from dbt_common.dataclass_schema import ValidationError +import dbt.tracking as tracking +from dbt import utils +from dbt.artifacts.resources import RefArgs +from dbt.clients.jinja import get_rendered +from dbt.context.context_config import ContextConfig +from dbt.contracts.graph.nodes import ModelNode from dbt.exceptions import ( ModelConfigError, ParsingError, PythonLiteralEvalError, PythonParsingError, ) +from dbt.flags import get_flags +from dbt.node_types import ModelLanguage, NodeType +from dbt.parser.base import SimpleSQLParser +from dbt.parser.search import FileBlock +from dbt_common.dataclass_schema import ValidationError from dbt_common.exceptions.macros import UndefinedMacroError +from dbt_extractor import ExtractionError, py_extract_from_source # type: ignore dbt_function_key_words = set(["ref", "source", "config", "get"]) dbt_function_full_names = set(["dbt.ref", "dbt.source", "dbt.config", "dbt.config.get"]) diff --git a/core/dbt/parser/partial.py b/core/dbt/parser/partial.py index f9c558be6ba..c5060745299 100644 --- a/core/dbt/parser/partial.py +++ b/core/dbt/parser/partial.py @@ -1,24 +1,20 @@ import os from copy import deepcopy -from typing import MutableMapping, Dict, List, Callable +from typing import Callable, Dict, List, MutableMapping -from dbt.contracts.graph.manifest import 
Manifest +from dbt.constants import DEFAULT_ENV_PLACEHOLDER from dbt.contracts.files import ( AnySourceFile, ParseFileType, - parse_file_type_to_parser, SchemaSourceFile, + parse_file_type_to_parser, ) +from dbt.contracts.graph.manifest import Manifest +from dbt.events.types import PartialParsingEnabled, PartialParsingFile +from dbt.node_types import NodeType from dbt_common.context import get_invocation_context -from dbt_common.events.functions import fire_event from dbt_common.events.base_types import EventLevel -from dbt.events.types import ( - PartialParsingEnabled, - PartialParsingFile, -) -from dbt.constants import DEFAULT_ENV_PLACEHOLDER -from dbt.node_types import NodeType - +from dbt_common.events.functions import fire_event mssat_files = ( ParseFileType.Model, diff --git a/core/dbt/parser/read_files.py b/core/dbt/parser/read_files.py index 314a2a0fdd1..e5e25841f06 100644 --- a/core/dbt/parser/read_files.py +++ b/core/dbt/parser/read_files.py @@ -1,26 +1,27 @@ import os -import pathspec # type: ignore import pathlib from dataclasses import dataclass, field -from dbt_common.clients.system import load_file_contents +from typing import Dict, List, Mapping, MutableMapping, Optional, Protocol + +import pathspec # type: ignore + +from dbt.config import Project from dbt.contracts.files import ( + AnySourceFile, + FileHash, FilePath, + FixtureSourceFile, ParseFileType, - SourceFile, - FileHash, - AnySourceFile, SchemaSourceFile, - FixtureSourceFile, + SourceFile, ) -from dbt.config import Project -from dbt_common.dataclass_schema import dbtClassMixin -from dbt.parser.schemas import yaml_from_file, schema_file_keys +from dbt.events.types import InputFileDiffError from dbt.exceptions import ParsingError +from dbt.parser.schemas import schema_file_keys, yaml_from_file from dbt.parser.search import filesystem_search -from typing import Optional, Dict, List, Mapping, MutableMapping -from dbt.events.types import InputFileDiffError +from dbt_common.clients.system import 
load_file_contents +from dbt_common.dataclass_schema import dbtClassMixin from dbt_common.events.functions import fire_event -from typing import Protocol @dataclass diff --git a/core/dbt/parser/schema_generic_tests.py b/core/dbt/parser/schema_generic_tests.py index be1f4a6e939..14e2dbc862a 100644 --- a/core/dbt/parser/schema_generic_tests.py +++ b/core/dbt/parser/schema_generic_tests.py @@ -1,38 +1,43 @@ -import pathlib import itertools import os +import pathlib +from typing import Any, Dict, List, Optional, Union -from typing import List, Dict, Optional, Union, Any +from dbt.adapters.factory import get_adapter, get_adapter_package_names from dbt.artifacts.resources import NodeVersion, RefArgs -from dbt.parser.base import SimpleParser -from dbt.parser.generic_test_builders import TestBuilder -from dbt.parser.search import FileBlock +from dbt.clients.jinja import add_rendered_test_kwargs, get_rendered +from dbt.context.configured import SchemaYamlVars, generate_schema_yml_context +from dbt.context.context_config import ContextConfig +from dbt.context.macro_resolver import MacroResolver from dbt.context.providers import generate_test_context +from dbt.contracts.files import FileHash +from dbt.contracts.graph.nodes import ( + GenericTestNode, + GraphMemberNode, + ManifestNode, + UnpatchedSourceDefinition, +) +from dbt.contracts.graph.unparsed import UnparsedColumn, UnparsedNodeUpdate +from dbt.exceptions import ( + CompilationError, + ParsingError, + SchemaConfigError, + TestConfigError, +) +from dbt.node_types import NodeType +from dbt.parser.base import SimpleParser from dbt.parser.common import ( - TestBlock, + GenericTestBlock, Testable, + TestBlock, TestDef, - GenericTestBlock, VersionedTestBlock, trimmed, ) -from dbt.contracts.graph.unparsed import UnparsedNodeUpdate, UnparsedColumn -from dbt.contracts.graph.nodes import ( - GenericTestNode, - UnpatchedSourceDefinition, - ManifestNode, - GraphMemberNode, -) -from dbt.context.context_config import ContextConfig 
-from dbt.context.configured import generate_schema_yml_context, SchemaYamlVars +from dbt.parser.generic_test_builders import TestBuilder +from dbt.parser.search import FileBlock +from dbt.utils import get_pseudo_test_path, md5 from dbt_common.dataclass_schema import ValidationError -from dbt.exceptions import SchemaConfigError, CompilationError, ParsingError, TestConfigError -from dbt.contracts.files import FileHash -from dbt.utils import md5, get_pseudo_test_path -from dbt.clients.jinja import get_rendered, add_rendered_test_kwargs -from dbt.adapters.factory import get_adapter, get_adapter_package_names -from dbt.node_types import NodeType -from dbt.context.macro_resolver import MacroResolver # This parser handles the tests that are defined in "schema" (yaml) files, on models, diff --git a/core/dbt/parser/schema_renderer.py b/core/dbt/parser/schema_renderer.py index 005f54f390e..3a0e1907593 100644 --- a/core/dbt/parser/schema_renderer.py +++ b/core/dbt/parser/schema_renderer.py @@ -1,4 +1,4 @@ -from typing import Dict, Any +from typing import Any, Dict from dbt.config.renderer import BaseRenderer, Keypath diff --git a/core/dbt/parser/schema_yaml_readers.py b/core/dbt/parser/schema_yaml_readers.py index b7c047d01dd..646de376763 100644 --- a/core/dbt/parser/schema_yaml_readers.py +++ b/core/dbt/parser/schema_yaml_readers.py @@ -1,31 +1,14 @@ -from dbt.parser.schemas import YamlReader, SchemaParser, ParseResult -from dbt.parser.common import YamlBlock -from dbt.node_types import NodeType -from dbt.contracts.graph.unparsed import ( - UnparsedDimension, - UnparsedDimensionTypeParams, - UnparsedEntity, - UnparsedExport, - UnparsedExposure, - UnparsedGroup, - UnparsedMeasure, - UnparsedMetric, - UnparsedMetricInput, - UnparsedMetricInputMeasure, - UnparsedMetricTypeParams, - UnparsedNonAdditiveDimension, - UnparsedQueryParams, - UnparsedSavedQuery, - UnparsedSemanticModel, - UnparsedConversionTypeParams, -) -from dbt.contracts.graph.nodes import ( - Exposure, - Group, - 
Metric, - SemanticModel, - SavedQuery, +from typing import Any, Dict, List, Optional, Union + +from dbt_semantic_interfaces.type_enums import ( + AggregationType, + ConversionCalculationType, + DimensionType, + EntityType, + MetricType, + TimeGranularity, ) + from dbt.artifacts.resources import ( ConversionTypeParams, Dimension, @@ -46,26 +29,41 @@ WhereFilter, WhereFilterIntersection, ) -from dbt_common.exceptions import DbtInternalError -from dbt.exceptions import YamlParseDictError, JSONValidationError -from dbt.context.providers import generate_parse_exposure, generate_parse_semantic_models - +from dbt.clients.jinja import get_rendered from dbt.context.context_config import ( BaseContextConfigGenerator, ContextConfigGenerator, UnrenderedConfigGenerator, ) -from dbt.clients.jinja import get_rendered -from dbt_common.dataclass_schema import ValidationError -from dbt_semantic_interfaces.type_enums import ( - AggregationType, - ConversionCalculationType, - DimensionType, - EntityType, - MetricType, - TimeGranularity, +from dbt.context.providers import ( + generate_parse_exposure, + generate_parse_semantic_models, ) -from typing import Any, Dict, List, Optional, Union +from dbt.contracts.graph.nodes import Exposure, Group, Metric, SavedQuery, SemanticModel +from dbt.contracts.graph.unparsed import ( + UnparsedConversionTypeParams, + UnparsedDimension, + UnparsedDimensionTypeParams, + UnparsedEntity, + UnparsedExport, + UnparsedExposure, + UnparsedGroup, + UnparsedMeasure, + UnparsedMetric, + UnparsedMetricInput, + UnparsedMetricInputMeasure, + UnparsedMetricTypeParams, + UnparsedNonAdditiveDimension, + UnparsedQueryParams, + UnparsedSavedQuery, + UnparsedSemanticModel, +) +from dbt.exceptions import JSONValidationError, YamlParseDictError +from dbt.node_types import NodeType +from dbt.parser.common import YamlBlock +from dbt.parser.schemas import ParseResult, SchemaParser, YamlReader +from dbt_common.dataclass_schema import ValidationError +from 
dbt_common.exceptions import DbtInternalError def parse_where_filter( diff --git a/core/dbt/parser/schemas.py b/core/dbt/parser/schemas.py index 838939b83fc..c8f222e15c3 100644 --- a/core/dbt/parser/schemas.py +++ b/core/dbt/parser/schemas.py @@ -1,25 +1,19 @@ import datetime import time - from abc import ABCMeta, abstractmethod -from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, Type, TypeVar from dataclasses import dataclass, field +from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, Type, TypeVar from dbt import deprecations -from dbt_common.contracts.constraints import ConstraintType, ModelLevelConstraint -from dbt_common.dataclass_schema import ValidationError, dbtClassMixin - from dbt.clients.yaml_helper import load_yaml_text -from dbt.parser.schema_renderer import SchemaYamlRenderer -from dbt.parser.schema_generic_tests import SchemaGenericTestParser +from dbt.context.configured import SchemaYamlVars, generate_schema_yml_context from dbt.context.context_config import ContextConfig -from dbt.context.configured import generate_schema_yml_context, SchemaYamlVars from dbt.contracts.files import SchemaSourceFile from dbt.contracts.graph.nodes import ( - ParsedNodePatch, + ModelNode, ParsedMacroPatch, + ParsedNodePatch, UnpatchedSourceDefinition, - ModelNode, ) from dbt.contracts.graph.unparsed import ( HasColumnDocs, @@ -27,46 +21,49 @@ SourcePatch, UnparsedAnalysisUpdate, UnparsedMacroUpdate, - UnparsedNodeUpdate, UnparsedModelUpdate, + UnparsedNodeUpdate, UnparsedSourceDefinition, ) +from dbt.events.types import ( + MacroNotFoundForPatch, + NoNodeForYamlKey, + UnsupportedConstraintMaterialization, + ValidationWarning, + WrongResourceSchemaFile, +) from dbt.exceptions import ( + DbtInternalError, DuplicateMacroPatchNameError, DuplicatePatchPathError, DuplicateSourcePatchNameError, + InvalidAccessTypeError, JSONValidationError, - DbtInternalError, ParsingError, YamlLoadError, YamlParseDictError, YamlParseListError, - 
InvalidAccessTypeError, ) -from dbt_common.exceptions import DbtValidationError -from dbt_common.events.functions import warn_or_error -from dbt.events.types import ( - MacroNotFoundForPatch, - NoNodeForYamlKey, - ValidationWarning, - UnsupportedConstraintMaterialization, - WrongResourceSchemaFile, -) -from dbt.node_types import NodeType, AccessType +from dbt.node_types import AccessType, NodeType from dbt.parser.base import SimpleParser -from dbt.parser.search import FileBlock from dbt.parser.common import ( - YamlBlock, + ParserRef, TargetBlock, TestBlock, VersionedTestBlock, - ParserRef, + YamlBlock, trimmed, ) +from dbt.parser.schema_generic_tests import SchemaGenericTestParser +from dbt.parser.schema_renderer import SchemaYamlRenderer +from dbt.parser.search import FileBlock from dbt.utils import coerce_dict_str +from dbt_common.contracts.constraints import ConstraintType, ModelLevelConstraint +from dbt_common.dataclass_schema import ValidationError, dbtClassMixin +from dbt_common.events.functions import warn_or_error +from dbt_common.exceptions import DbtValidationError from dbt_common.utils import deep_merge - schema_file_keys = ( "models", "seeds", @@ -111,11 +108,24 @@ # =============================================================================== -def yaml_from_file(source_file: SchemaSourceFile) -> Dict[str, Any]: +def yaml_from_file(source_file: SchemaSourceFile) -> Optional[Dict[str, Any]]: """If loading the yaml fails, raise an exception.""" try: # source_file.contents can sometimes be None - return load_yaml_text(source_file.contents or "", source_file.path) + contents = load_yaml_text(source_file.contents or "", source_file.path) + + if contents is None: + return contents + + # When loaded_loaded_at_field is defined as None or null, it shows up in + # the dict but when it is not defined, it does not show up in the dict + # We need to capture this to be able to override source level settings later. 
+ for source in contents.get("sources", []): + for table in source.get("tables", []): + if "loaded_at_field" in table: + table["loaded_at_field_present"] = True + + return contents except DbtValidationError as e: raise YamlLoadError( project_name=source_file.project_name, path=source_file.path.relative_path, exc=e diff --git a/core/dbt/parser/search.py b/core/dbt/parser/search.py index 36289833240..444fb4439eb 100644 --- a/core/dbt/parser/search.py +++ b/core/dbt/parser/search.py @@ -1,13 +1,24 @@ import os from dataclasses import dataclass -from typing import List, Callable, Iterable, Set, Union, Iterator, TypeVar, Generic, Optional +from typing import ( + Callable, + Generic, + Iterable, + Iterator, + List, + Optional, + Set, + TypeVar, + Union, +) + from pathspec import PathSpec # type: ignore -from dbt_common.clients.jinja import extract_toplevel_blocks, BlockTag -from dbt_common.clients.system import find_matching from dbt.config import Project -from dbt.contracts.files import FilePath, AnySourceFile -from dbt.exceptions import ParsingError, DbtInternalError +from dbt.contracts.files import AnySourceFile, FilePath +from dbt.exceptions import DbtInternalError, ParsingError +from dbt_common.clients.jinja import BlockTag, extract_toplevel_blocks +from dbt_common.clients.system import find_matching # What's the point of wrapping a SourceFile with this class? 
diff --git a/core/dbt/parser/snapshots.py b/core/dbt/parser/snapshots.py index 27749a5e78b..259c4289817 100644 --- a/core/dbt/parser/snapshots.py +++ b/core/dbt/parser/snapshots.py @@ -1,14 +1,13 @@ import os from typing import List -from dbt_common.dataclass_schema import ValidationError - from dbt.contracts.graph.nodes import IntermediateSnapshotNode, SnapshotNode from dbt.exceptions import SnapshopConfigError from dbt.node_types import NodeType from dbt.parser.base import SQLParser from dbt.parser.search import BlockContents, BlockSearcher, FileBlock from dbt.utils import split_path +from dbt_common.dataclass_schema import ValidationError class SnapshotParser(SQLParser[IntermediateSnapshotNode, SnapshotNode]): diff --git a/core/dbt/parser/sources.py b/core/dbt/parser/sources.py index 1f57efe79ce..5666a44da11 100644 --- a/core/dbt/parser/sources.py +++ b/core/dbt/parser/sources.py @@ -1,15 +1,11 @@ -from dataclasses import replace import itertools +from dataclasses import replace from pathlib import Path -from typing import Iterable, Dict, Optional, Set, Any, List +from typing import Any, Dict, Iterable, List, Optional, Set from dbt.adapters.capability import Capability from dbt.adapters.factory import get_adapter -from dbt.artifacts.resources import ( - FreshnessThreshold, - SourceConfig, - Time, -) +from dbt.artifacts.resources import FreshnessThreshold, SourceConfig, Time from dbt.config import RuntimeConfig from dbt.context.context_config import ( BaseContextConfigGenerator, @@ -18,25 +14,23 @@ ) from dbt.contracts.graph.manifest import Manifest, SourceKey from dbt.contracts.graph.nodes import ( - UnpatchedSourceDefinition, - SourceDefinition, GenericTestNode, + SourceDefinition, + UnpatchedSourceDefinition, ) from dbt.contracts.graph.unparsed import ( - UnparsedSourceDefinition, SourcePatch, SourceTablePatch, - UnparsedSourceTableDefinition, UnparsedColumn, + UnparsedSourceDefinition, + UnparsedSourceTableDefinition, ) -from dbt_common.events.functions 
import warn_or_error, fire_event -from dbt.events.types import UnusedTables, FreshnessConfigProblem - -from dbt_common.exceptions import DbtInternalError +from dbt.events.types import FreshnessConfigProblem, UnusedTables from dbt.node_types import NodeType - from dbt.parser.common import ParserRef from dbt.parser.schema_generic_tests import SchemaGenericTestParser +from dbt_common.events.functions import fire_event, warn_or_error +from dbt_common.exceptions import DbtInternalError # An UnparsedSourceDefinition is taken directly from the yaml @@ -133,7 +127,14 @@ def parse_source(self, target: UnpatchedSourceDefinition) -> SourceDefinition: unique_id = target.unique_id description = table.description or "" source_description = source.description or "" - loaded_at_field = table.loaded_at_field or source.loaded_at_field + + # We need to be able to tell the difference between explicitly setting the loaded_at_field to None/null + # and when it's simply not set. This allows a user to override the source level loaded_at_field so that + # specific table can default to metadata-based freshness. 
+ if table.loaded_at_field_present or table.loaded_at_field is not None: + loaded_at_field = table.loaded_at_field + else: + loaded_at_field = source.loaded_at_field # may be None, that's okay freshness = merge_freshness(source.freshness, table.freshness) quoting = source.quoting.merged(table.quoting) diff --git a/core/dbt/parser/sql.py b/core/dbt/parser/sql.py index 5294f3657aa..ffd16ccc015 100644 --- a/core/dbt/parser/sql.py +++ b/core/dbt/parser/sql.py @@ -3,13 +3,13 @@ from typing import Iterable from dbt.contracts.graph.manifest import SourceFile -from dbt.contracts.graph.nodes import SqlNode, Macro +from dbt.contracts.graph.nodes import Macro, SqlNode from dbt.contracts.graph.unparsed import UnparsedMacro -from dbt_common.exceptions import DbtInternalError from dbt.node_types import NodeType from dbt.parser.base import SimpleSQLParser from dbt.parser.macros import MacroParser from dbt.parser.search import FileBlock +from dbt_common.exceptions import DbtInternalError @dataclass diff --git a/core/dbt/parser/unit_tests.py b/core/dbt/parser/unit_tests.py index 0abadca5cf9..6057ea77cf3 100644 --- a/core/dbt/parser/unit_tests.py +++ b/core/dbt/parser/unit_tests.py @@ -1,42 +1,41 @@ -from csv import DictReader -from copy import deepcopy -from pathlib import Path -from typing import List, Set, Dict, Any, Optional +import csv import os +from copy import deepcopy +from csv import DictReader from io import StringIO -import csv - -from dbt_extractor import py_extract_from_source, ExtractionError # type: ignore +from pathlib import Path +from typing import Any, Dict, List, Optional, Set from dbt import utils +from dbt.artifacts.resources import ModelConfig, UnitTestConfig, UnitTestFormat from dbt.config import RuntimeConfig from dbt.context.context_config import ContextConfig from dbt.context.providers import generate_parse_exposure, get_rendered from dbt.contracts.files import FileHash, SchemaSourceFile from dbt.contracts.graph.manifest import Manifest from 
dbt.contracts.graph.model_config import UnitTestNodeConfig -from dbt.artifacts.resources import ModelConfig, UnitTestConfig, UnitTestFormat from dbt.contracts.graph.nodes import ( + DependsOn, ModelNode, - UnitTestNode, UnitTestDefinition, - DependsOn, + UnitTestNode, UnitTestSourceDefinition, ) from dbt.contracts.graph.unparsed import UnparsedUnitTest -from dbt.exceptions import ParsingError, InvalidUnitTestGivenInput +from dbt.exceptions import InvalidUnitTestGivenInput, ParsingError from dbt.graph import UniqueId from dbt.node_types import NodeType from dbt.parser.schemas import ( + JSONValidationError, + ParseResult, SchemaParser, - YamlBlock, ValidationError, - JSONValidationError, + YamlBlock, YamlParseDictError, YamlReader, - ParseResult, ) from dbt.utils import get_pseudo_test_path +from dbt_extractor import ExtractionError, py_extract_from_source # type: ignore class UnitTestManifestLoader: @@ -152,7 +151,10 @@ def parse_unit_test_case(self, test_case: UnitTestDefinition): NodeType.Seed, NodeType.Snapshot, ): - input_node = ModelNode(**common_fields) + input_node = ModelNode( + **common_fields, + defer_relation=original_input_node.defer_relation, + ) if ( original_input_node.resource_type == NodeType.Model and original_input_node.version @@ -363,11 +365,17 @@ def _validate_and_normalize_rows(self, ut_fixture, unit_test_definition, fixture ) if ut_fixture.fixture: - ut_fixture.rows = self.get_fixture_file_rows( + csv_rows = self.get_fixture_file_rows( ut_fixture.fixture, self.project.project_name, unit_test_definition.unique_id ) else: - ut_fixture.rows = self._convert_csv_to_list_of_dicts(ut_fixture.rows) + csv_rows = self._convert_csv_to_list_of_dicts(ut_fixture.rows) + + # Empty values (e.g. 
,,) in a csv fixture should default to null, not "" + ut_fixture.rows = [ + {k: (None if v == "" else v) for k, v in row.items()} for row in csv_rows + ] + elif ut_fixture.format == UnitTestFormat.SQL: if not (isinstance(ut_fixture.rows, str) or isinstance(ut_fixture.fixture, str)): raise ParsingError( diff --git a/core/dbt/plugins/__init__.py b/core/dbt/plugins/__init__.py index e6ed7198d80..37744d655e4 100644 --- a/core/dbt/plugins/__init__.py +++ b/core/dbt/plugins/__init__.py @@ -1,10 +1,7 @@ from typing import Optional -from .manager import PluginManager - # these are just exports, they need "noqa" so flake8 will not complain. -from .manager import dbtPlugin, dbt_hook # noqa - +from .manager import PluginManager, dbt_hook, dbtPlugin # noqa PLUGIN_MANAGER: Optional[PluginManager] = None diff --git a/core/dbt/plugins/contracts.py b/core/dbt/plugins/contracts.py index b0342c72a34..f6f5b4d6543 100644 --- a/core/dbt/plugins/contracts.py +++ b/core/dbt/plugins/contracts.py @@ -1,9 +1,9 @@ from typing import Dict # just exports, they need "noqa" so flake8 will not complain. 
-from dbt.artifacts.schemas.base import ArtifactMixin as PluginArtifact, schema_version # noqa +from dbt.artifacts.schemas.base import ArtifactMixin as PluginArtifact # noqa from dbt.artifacts.schemas.base import BaseArtifactMetadata # noqa -from dbt_common.dataclass_schema import dbtClassMixin, ExtensibleDbtClassMixin # noqa - +from dbt.artifacts.schemas.base import schema_version # noqa +from dbt_common.dataclass_schema import ExtensibleDbtClassMixin, dbtClassMixin # noqa PluginArtifacts = Dict[str, PluginArtifact] diff --git a/core/dbt/plugins/manager.py b/core/dbt/plugins/manager.py index 118d191f3af..5ef8b8fb735 100644 --- a/core/dbt/plugins/manager.py +++ b/core/dbt/plugins/manager.py @@ -2,14 +2,14 @@ import importlib import pkgutil from types import ModuleType -from typing import Dict, List, Callable, Mapping +from typing import Callable, Dict, List, Mapping +import dbt.tracking from dbt.contracts.graph.manifest import Manifest -from dbt_common.tests import test_caching_enabled -from dbt_common.exceptions import DbtRuntimeError from dbt.plugins.contracts import PluginArtifacts from dbt.plugins.manifest import PluginNodes -import dbt.tracking +from dbt_common.exceptions import DbtRuntimeError +from dbt_common.tests import test_caching_enabled def dbt_hook(func): diff --git a/core/dbt/plugins/manifest.py b/core/dbt/plugins/manifest.py index 9b1080e3afa..2c39599dba2 100644 --- a/core/dbt/plugins/manifest.py +++ b/core/dbt/plugins/manifest.py @@ -1,13 +1,13 @@ from dataclasses import dataclass, field from typing import Dict -from dbt.contracts.graph.node_args import ModelNodeArgs +from dbt.artifacts.resources import NodeVersion # noqa # all these are just exports, they need "noqa" so flake8 will not complain. 
from dbt.contracts.graph.manifest import Manifest # noqa -from dbt.node_types import AccessType, NodeType # noqa -from dbt.artifacts.resources import NodeVersion # noqa +from dbt.contracts.graph.node_args import ModelNodeArgs from dbt.graph.graph import UniqueId # noqa +from dbt.node_types import AccessType, NodeType # noqa @dataclass diff --git a/core/dbt/selected_resources.py b/core/dbt/selected_resources.py index 871cf059beb..74104fa6ef8 100644 --- a/core/dbt/selected_resources.py +++ b/core/dbt/selected_resources.py @@ -1,4 +1,4 @@ -from typing import Set, Any +from typing import Any, Set SELECTED_RESOURCES = [] diff --git a/core/dbt/task/base.py b/core/dbt/task/base.py index d4c206b023c..dfade18f1ab 100644 --- a/core/dbt/task/base.py +++ b/core/dbt/task/base.py @@ -8,46 +8,48 @@ from pathlib import Path from typing import Any, Dict, List, Optional, Set -from dbt.compilation import Compiler -import dbt_common.exceptions.base import dbt.exceptions +import dbt_common.exceptions.base from dbt import tracking +from dbt.artifacts.resources.types import NodeType +from dbt.artifacts.schemas.results import ( + NodeStatus, + RunningStatus, + RunStatus, + TimingInfo, + collect_timing_info, +) +from dbt.artifacts.schemas.run import RunResult from dbt.cli.flags import Flags +from dbt.compilation import Compiler from dbt.config import RuntimeConfig from dbt.config.profile import read_profile from dbt.constants import DBT_PROJECT_FILE_NAME from dbt.contracts.graph.manifest import Manifest -from dbt.artifacts.resources.types import NodeType -from dbt.artifacts.schemas.results import TimingInfo, collect_timing_info -from dbt.artifacts.schemas.results import NodeStatus, RunningStatus, RunStatus -from dbt.artifacts.schemas.run import RunResult -from dbt_common.events.contextvars import get_node_info -from dbt_common.events.functions import fire_event from dbt.events.types import ( - SkippingDetails, - NodeCompiling, - NodeExecuting, CatchableExceptionOnRun, - InternalErrorOnRun, 
GenericExceptionOnRun, - NodeConnectionReleaseError, + InternalErrorOnRun, + LogDbtProfileError, + LogDbtProjectError, LogDebugStackTrace, LogSkipBecauseError, + NodeCompiling, + NodeConnectionReleaseError, + NodeExecuting, + SkippingDetails, ) +from dbt.flags import get_flags +from dbt.graph import Graph +from dbt.task.printer import print_run_result_error +from dbt_common.events.contextvars import get_node_info +from dbt_common.events.functions import fire_event from dbt_common.exceptions import ( - DbtRuntimeError, - DbtInternalError, CompilationError, + DbtInternalError, + DbtRuntimeError, NotImplementedError, ) -from dbt.events.types import ( - LogDbtProjectError, - LogDbtProfileError, -) -from dbt.flags import get_flags -from dbt.graph import Graph -from dbt.logger import log_manager -from dbt.task.printer import print_run_result_error def read_profiles(profiles_dir=None): @@ -69,21 +71,6 @@ class BaseTask(metaclass=ABCMeta): def __init__(self, args: Flags) -> None: self.args = args - @classmethod - def pre_init_hook(cls, args: Flags): - """A hook called before the task is initialized.""" - if args.log_format == "json": - log_manager.format_json() - else: - log_manager.format_text() - - @classmethod - def set_log_format(cls): - if get_flags().LOG_FORMAT == "json": - log_manager.format_json() - else: - log_manager.format_text() - @abstractmethod def run(self): raise dbt_common.exceptions.base.NotImplementedError("Not Implemented") diff --git a/core/dbt/task/build.py b/core/dbt/task/build.py index 57f11c71bd5..e3880f69a63 100644 --- a/core/dbt/task/build.py +++ b/core/dbt/task/build.py @@ -1,23 +1,24 @@ import threading from typing import Dict, List, Set -from .run import RunTask, ModelRunner as run_model_runner -from .snapshot import SnapshotRunner as snapshot_model_runner -from .seed import SeedRunner as seed_runner -from .test import TestRunner as test_runner - from dbt.artifacts.schemas.results import NodeStatus, RunStatus from dbt.artifacts.schemas.run 
import RunResult from dbt.cli.flags import Flags from dbt.config.runtime import RuntimeConfig from dbt.contracts.graph.manifest import Manifest -from dbt.graph import ResourceTypeSelector, GraphQueue, Graph +from dbt.events.types import LogNodeNoOpResult +from dbt.exceptions import DbtInternalError +from dbt.graph import Graph, GraphQueue, ResourceTypeSelector from dbt.node_types import NodeType -from dbt.task.test import TestSelector from dbt.task.base import BaseRunner, resource_types_from_args +from dbt.task.test import TestSelector from dbt_common.events.functions import fire_event -from dbt.events.types import LogNodeNoOpResult -from dbt.exceptions import DbtInternalError + +from .run import ModelRunner as run_model_runner +from .run import RunTask +from .seed import SeedRunner as seed_runner +from .snapshot import SnapshotRunner as snapshot_model_runner +from .test import TestRunner as test_runner class SavedQueryRunner(BaseRunner): diff --git a/core/dbt/task/clean.py b/core/dbt/task/clean.py index c4e98f5db2b..d7bfb92fc9a 100644 --- a/core/dbt/task/clean.py +++ b/core/dbt/task/clean.py @@ -2,19 +2,12 @@ from shutil import rmtree from dbt import deprecations -from dbt_common.events.functions import fire_event -from dbt.events.types import ( - CheckCleanPath, - ConfirmCleanPath, - FinishedCleanPaths, -) -from dbt_common.exceptions import DbtRuntimeError from dbt.cli.flags import Flags from dbt.config.project import Project -from dbt.task.base import ( - BaseTask, - move_to_nearest_project_dir, -) +from dbt.events.types import CheckCleanPath, ConfirmCleanPath, FinishedCleanPaths +from dbt.task.base import BaseTask, move_to_nearest_project_dir +from dbt_common.events.functions import fire_event +from dbt_common.exceptions import DbtRuntimeError class CleanTask(BaseTask): diff --git a/core/dbt/task/clone.py b/core/dbt/task/clone.py index 44f6a30841d..98ac7153653 100644 --- a/core/dbt/task/clone.py +++ b/core/dbt/task/clone.py @@ -1,18 +1,18 @@ import threading 
-from typing import AbstractSet, Any, List, Iterable, Set, Optional +from typing import AbstractSet, Any, Iterable, List, Optional, Set from dbt.adapters.base import BaseRelation +from dbt.artifacts.schemas.run import RunResult, RunStatus from dbt.clients.jinja import MacroGenerator from dbt.context.providers import generate_runtime_model_context from dbt.contracts.graph.manifest import Manifest -from dbt.artifacts.schemas.run import RunStatus, RunResult -from dbt_common.dataclass_schema import dbtClassMixin -from dbt_common.exceptions import DbtInternalError, CompilationError from dbt.graph import ResourceTypeSelector from dbt.node_types import REFABLE_NODE_TYPES from dbt.task.base import BaseRunner, resource_types_from_args from dbt.task.run import _validate_materialization_relations_dict -from dbt.task.runnable import GraphRunnableTask +from dbt.task.runnable import GraphRunnableMode, GraphRunnableTask +from dbt_common.dataclass_schema import dbtClassMixin +from dbt_common.exceptions import CompilationError, DbtInternalError class CloneRunner(BaseRunner): @@ -94,6 +94,9 @@ class CloneTask(GraphRunnableTask): def raise_on_first_error(self): return False + def get_run_mode(self) -> GraphRunnableMode: + return GraphRunnableMode.Independent + def _get_deferred_manifest(self) -> Optional[Manifest]: # Unlike other commands, 'clone' always requires a state manifest # Load previous state, regardless of whether --defer flag has been set @@ -122,9 +125,8 @@ def get_model_schemas(self, adapter, selected_uids: Iterable[str]) -> Set[BaseRe def before_run(self, adapter, selected_uids: AbstractSet[str]): with adapter.connection_named("master"): - # unlike in other tasks, we want to add information from the --state manifest *before* caching! 
- self.defer_to_manifest(adapter, selected_uids) - # only create *our* schemas, but cache *other* schemas in addition + self.defer_to_manifest() + # only create target schemas, but also cache defer_relation schemas schemas_to_create = super().get_model_schemas(adapter, selected_uids) self.create_schemas(adapter, schemas_to_create) schemas_to_cache = self.get_model_schemas(adapter, selected_uids) diff --git a/core/dbt/task/compile.py b/core/dbt/task/compile.py index 4541dfd1567..5470c67c68d 100644 --- a/core/dbt/task/compile.py +++ b/core/dbt/task/compile.py @@ -1,22 +1,19 @@ import threading -from dbt.artifacts.schemas.run import RunStatus, RunResult -from dbt_common.events.base_types import EventLevel -from dbt_common.events.functions import fire_event -from dbt_common.events.types import Note -from dbt.events.types import ParseInlineNodeError, CompiledNode -from dbt_common.exceptions import ( - CompilationError, - DbtInternalError, - DbtBaseException as DbtException, -) - +from dbt.artifacts.schemas.run import RunResult, RunStatus +from dbt.events.types import CompiledNode, ParseInlineNodeError from dbt.graph import ResourceTypeSelector -from dbt.node_types import NodeType, EXECUTABLE_NODE_TYPES +from dbt.node_types import EXECUTABLE_NODE_TYPES, NodeType from dbt.parser.manifest import process_node from dbt.parser.sql import SqlBlockParser from dbt.task.base import BaseRunner from dbt.task.runnable import GraphRunnableTask +from dbt_common.events.base_types import EventLevel +from dbt_common.events.functions import fire_event +from dbt_common.events.types import Note +from dbt_common.exceptions import CompilationError +from dbt_common.exceptions import DbtBaseException as DbtException +from dbt_common.exceptions import DbtInternalError class CompileRunner(BaseRunner): diff --git a/core/dbt/task/debug.py b/core/dbt/task/debug.py index b388e4336ba..53eca1e53bc 100644 --- a/core/dbt/task/debug.py +++ b/core/dbt/task/debug.py @@ -3,34 +3,28 @@ import os import 
platform import sys - from collections import namedtuple from enum import Flag from pathlib import Path -from typing import Optional, Dict, Any, List, Tuple +from typing import Any, Dict, List, Optional, Tuple -from dbt_common.events.functions import fire_event -from dbt.events.types import ( - OpenCommand, - DebugCmdOut, - DebugCmdResult, -) -import dbt_common.clients.system import dbt.exceptions +import dbt_common.clients.system import dbt_common.exceptions from dbt.adapters.factory import get_adapter, register_adapter -from dbt.cli.flags import Flags -from dbt.config import PartialProject, Project, Profile -from dbt.config.renderer import DbtProjectYamlRenderer, ProfileRenderer from dbt.artifacts.schemas.results import RunStatus +from dbt.cli.flags import Flags from dbt.clients.yaml_helper import load_yaml_text +from dbt.config import PartialProject, Profile, Project +from dbt.config.renderer import DbtProjectYamlRenderer, ProfileRenderer +from dbt.events.types import DebugCmdOut, DebugCmdResult, OpenCommand from dbt.links import ProfileConfigDocs -from dbt_common.ui import green, red -from dbt_common.events.format import pluralize from dbt.mp_context import get_mp_context -from dbt.version import get_installed_version - from dbt.task.base import BaseTask, get_nearest_project_dir +from dbt.version import get_installed_version +from dbt_common.events.format import pluralize +from dbt_common.events.functions import fire_event +from dbt_common.ui import green, red ONLY_PROFILE_MESSAGE = """ A `dbt_project.yml` file was not found in this directory. 
diff --git a/core/dbt/task/deps.py b/core/dbt/task/deps.py index 0f8e45f073f..3e92154cf17 100644 --- a/core/dbt/task/deps.py +++ b/core/dbt/task/deps.py @@ -1,25 +1,21 @@ +import json from hashlib import sha1 -from typing import Any, Dict, Optional, List -import yaml from pathlib import Path -import dbt.utils +from typing import Any, Dict, List, Optional + +import yaml + import dbt.deprecations import dbt.exceptions -import json - +import dbt.utils +from dbt.config import Project +from dbt.config.project import load_yml_dict, package_config_from_data from dbt.config.renderer import PackageRenderer -from dbt.config.project import package_config_from_data, load_yml_dict from dbt.constants import PACKAGE_LOCK_FILE_NAME, PACKAGE_LOCK_HASH_KEY +from dbt.contracts.project import PackageSpec from dbt.deps.base import downloads_directory -from dbt.deps.resolver import resolve_lock_packages, resolve_packages from dbt.deps.registry import RegistryPinnedPackage -from dbt.contracts.project import PackageSpec - - -from dbt_common.events.functions import fire_event -from dbt_common.events.types import ( - Formatting, -) +from dbt.deps.resolver import resolve_lock_packages, resolve_packages from dbt.events.types import ( DepsAddPackage, DepsFoundDuplicatePackage, @@ -32,11 +28,10 @@ DepsUpdateAvailable, DepsUpToDate, ) -from dbt_common.clients import system - from dbt.task.base import BaseTask, move_to_nearest_project_dir - -from dbt.config import Project +from dbt_common.clients import system +from dbt_common.events.functions import fire_event +from dbt_common.events.types import Formatting class dbtPackageDumper(yaml.Dumper): diff --git a/core/dbt/task/docs/generate.py b/core/dbt/task/docs/generate.py index 800f997268d..7f238cf4e44 100644 --- a/core/dbt/task/docs/generate.py +++ b/core/dbt/task/docs/generate.py @@ -2,52 +2,48 @@ import shutil from dataclasses import replace from datetime import datetime -from typing import Dict, List, Any, Optional, Tuple, Set, Iterable -import 
agate from itertools import chain +from typing import Any, Dict, Iterable, List, Optional, Set, Tuple -import dbt_common.utils.formatting -from dbt_common.dataclass_schema import ValidationError -from dbt_common.clients.system import load_file_contents - -from dbt.task.docs import DOCS_INDEX_FILE_PATH -from dbt.task.compile import CompileTask +import agate +import dbt.compilation +import dbt.exceptions +import dbt.utils +import dbt_common.utils.formatting +from dbt.adapters.events.types import ( + BuildingCatalog, + CannotGenerateDocs, + CatalogWritten, + WriteCatalogFailure, +) from dbt.adapters.factory import get_adapter - -from dbt.graph.graph import UniqueId -from dbt.contracts.graph.nodes import ResultNode -from dbt.contracts.graph.manifest import Manifest -from dbt.artifacts.schemas.results import NodeStatus from dbt.artifacts.schemas.catalog import ( - TableMetadata, - CatalogTable, + CatalogArtifact, + CatalogKey, CatalogResults, + CatalogTable, + ColumnMetadata, PrimitiveDict, - CatalogKey, - StatsItem, StatsDict, - ColumnMetadata, - CatalogArtifact, + StatsItem, + TableMetadata, ) -from dbt_common.exceptions import DbtInternalError +from dbt.artifacts.schemas.results import NodeStatus +from dbt.constants import MANIFEST_FILE_NAME +from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.graph.nodes import ResultNode from dbt.exceptions import AmbiguousCatalogMatchError from dbt.graph import ResourceTypeSelector +from dbt.graph.graph import UniqueId from dbt.node_types import EXECUTABLE_NODE_TYPES, NodeType -from dbt_common.events.functions import fire_event -from dbt.adapters.events.types import ( - WriteCatalogFailure, - CatalogWritten, - CannotGenerateDocs, - BuildingCatalog, -) from dbt.parser.manifest import write_manifest -import dbt.utils -import dbt.compilation -import dbt.exceptions -from dbt.constants import ( - MANIFEST_FILE_NAME, -) +from dbt.task.compile import CompileTask +from dbt.task.docs import DOCS_INDEX_FILE_PATH +from 
dbt_common.clients.system import load_file_contents +from dbt_common.dataclass_schema import ValidationError +from dbt_common.events.functions import fire_event +from dbt_common.exceptions import DbtInternalError CATALOG_FILENAME = "catalog.json" diff --git a/core/dbt/task/docs/index.html b/core/dbt/task/docs/index.html index 6ccb6e73107..24953e6ba2c 100644 --- a/core/dbt/task/docs/index.html +++ b/core/dbt/task/docs/index.html @@ -24,7 +24,7 @@
icons
- diff --git a/core/dbt/task/docs/serve.py b/core/dbt/task/docs/serve.py index c3e6fda61fa..c0cd4e570ac 100644 --- a/core/dbt/task/docs/serve.py +++ b/core/dbt/task/docs/serve.py @@ -6,8 +6,8 @@ import click -from dbt.task.docs import DOCS_INDEX_FILE_PATH from dbt.task.base import ConfiguredTask +from dbt.task.docs import DOCS_INDEX_FILE_PATH class ServeTask(ConfiguredTask): @@ -20,7 +20,7 @@ def run(self): if self.args.browser: webbrowser.open_new_tab(f"http://localhost:{port}") - with socketserver.TCPServer(("", port), SimpleHTTPRequestHandler) as httpd: + with socketserver.TCPServer(("127.0.0.1", port), SimpleHTTPRequestHandler) as httpd: click.echo(f"Serving docs at {port}") click.echo(f"To access from your browser, navigate to: http://localhost:{port}") click.echo("\n\n") diff --git a/core/dbt/task/freshness.py b/core/dbt/task/freshness.py index b1fe7581c30..e20671dc532 100644 --- a/core/dbt/task/freshness.py +++ b/core/dbt/task/freshness.py @@ -1,38 +1,32 @@ import os import threading import time -from typing import Optional, List, AbstractSet, Dict - -from .base import BaseRunner -from .printer import ( - print_run_result_error, -) -from .run import RunTask +from typing import AbstractSet, Dict, List, Optional +from dbt import deprecations +from dbt.adapters.base.impl import FreshnessResponse +from dbt.adapters.base.relation import BaseRelation +from dbt.adapters.capability import Capability +from dbt.adapters.contracts.connection import AdapterResponse from dbt.artifacts.schemas.freshness import ( FreshnessResult, + FreshnessStatus, PartialSourceFreshnessResult, SourceFreshnessResult, - FreshnessStatus, -) -from dbt_common.exceptions import DbtRuntimeError, DbtInternalError -from dbt_common.events.functions import fire_event -from dbt_common.events.types import Note -from dbt.events.types import ( - FreshnessCheckComplete, - LogStartLine, - LogFreshnessResult, ) +from dbt.contracts.graph.nodes import HookNode, SourceDefinition from dbt.contracts.results 
import RunStatus +from dbt.events.types import FreshnessCheckComplete, LogFreshnessResult, LogStartLine +from dbt.graph import ResourceTypeSelector from dbt.node_types import NodeType, RunHookType - -from dbt.adapters.capability import Capability -from dbt.adapters.contracts.connection import AdapterResponse -from dbt.adapters.base.relation import BaseRelation -from dbt.adapters.base.impl import FreshnessResponse -from dbt.contracts.graph.nodes import SourceDefinition, HookNode from dbt_common.events.base_types import EventLevel -from dbt.graph import ResourceTypeSelector +from dbt_common.events.functions import fire_event +from dbt_common.events.types import Note +from dbt_common.exceptions import DbtInternalError, DbtRuntimeError + +from .base import BaseRunner +from .printer import print_run_result_error +from .run import RunTask RESULT_FILE_NAME = "sources.json" @@ -240,9 +234,12 @@ def task_end_messages(self, results): fire_event(FreshnessCheckComplete()) def get_hooks_by_type(self, hook_type: RunHookType) -> List[HookNode]: + hooks = super().get_hooks_by_type(hook_type) if self.args.source_freshness_run_project_hooks: - return super().get_hooks_by_type(hook_type) + return hooks else: + if hooks: + deprecations.warn("source-freshness-project-hooks") return [] def populate_metadata_freshness_cache(self, adapter, selected_uids: AbstractSet[str]) -> None: diff --git a/core/dbt/task/init.py b/core/dbt/task/init.py index aa7e942c206..61d8f30e039 100644 --- a/core/dbt/task/init.py +++ b/core/dbt/task/init.py @@ -1,38 +1,35 @@ import copy import os -from pathlib import Path import re import shutil +from pathlib import Path from typing import Optional -import yaml import click +import yaml import dbt.config import dbt_common.clients.system +from dbt.adapters.factory import get_include_paths, load_plugin from dbt.config.profile import read_profile -from dbt_common.exceptions import DbtRuntimeError -from dbt.flags import get_flags -from dbt.version import 
_get_adapter_plugin_names -from dbt.adapters.factory import load_plugin, get_include_paths - from dbt.contracts.util import Identifier as ProjectName - -from dbt_common.events.functions import fire_event from dbt.events.types import ( - StarterProjectPath, ConfigFolderDirectory, + InvalidProfileTemplateYAML, NoSampleProfileFound, + ProfileWrittenWithProjectTemplateYAML, ProfileWrittenWithSample, ProfileWrittenWithTargetTemplateYAML, - ProfileWrittenWithProjectTemplateYAML, - SettingUpProfile, - InvalidProfileTemplateYAML, - ProjectNameAlreadyExists, ProjectCreated, + ProjectNameAlreadyExists, + SettingUpProfile, + StarterProjectPath, ) - +from dbt.flags import get_flags from dbt.task.base import BaseTask, move_to_nearest_project_dir +from dbt.version import _get_adapter_plugin_names +from dbt_common.events.functions import fire_event +from dbt_common.exceptions import DbtRuntimeError DOCS_URL = "https://docs.getdbt.com/docs/configure-your-profile" SLACK_URL = "https://community.getdbt.com/" @@ -55,7 +52,9 @@ class InitTask(BaseTask): def copy_starter_repo(self, project_name: str) -> None: # Lazy import to avoid ModuleNotFoundError - from dbt.include.starter_project import PACKAGE_PATH as starter_project_directory + from dbt.include.starter_project import ( + PACKAGE_PATH as starter_project_directory, + ) fire_event(StarterProjectPath(dir=starter_project_directory)) shutil.copytree( diff --git a/core/dbt/task/list.py b/core/dbt/task/list.py index e345bc78d94..09358df4ffe 100644 --- a/core/dbt/task/list.py +++ b/core/dbt/task/list.py @@ -1,29 +1,26 @@ import json +from dbt.cli.flags import Flags +from dbt.config.runtime import RuntimeConfig +from dbt.contracts.graph.manifest import Manifest from dbt.contracts.graph.nodes import ( Exposure, - SourceDefinition, Metric, SavedQuery, SemanticModel, + SourceDefinition, UnitTestDefinition, ) -from dbt.cli.flags import Flags -from dbt.config.runtime import RuntimeConfig -from dbt.contracts.graph.manifest import Manifest 
-from dbt.flags import get_flags +from dbt.events.types import NoNodesSelected from dbt.graph import ResourceTypeSelector +from dbt.node_types import NodeType from dbt.task.base import resource_types_from_args from dbt.task.runnable import GraphRunnableTask from dbt.task.test import TestSelector -from dbt.node_types import NodeType -from dbt_common.events.functions import ( - fire_event, - warn_or_error, -) -from dbt.events.types import NoNodesSelected, ListCmdOut -from dbt_common.exceptions import DbtRuntimeError, DbtInternalError from dbt_common.events.contextvars import task_contextvars +from dbt_common.events.functions import fire_event, warn_or_error +from dbt_common.events.types import PrintEvent +from dbt_common.exceptions import DbtInternalError, DbtRuntimeError class ListTask(GraphRunnableTask): @@ -175,11 +172,8 @@ def output_results(self, results): """Log, or output a plain, newline-delimited, and ready-to-pipe list of nodes found.""" for result in results: self.node_results.append(result) - if get_flags().LOG_FORMAT == "json": - fire_event(ListCmdOut(msg=result)) - else: - # Cleaner to leave as print than to mutate the logger not to print timestamps. 
- print(result) + # No formatting, still get to stdout when --quiet is used + fire_event(PrintEvent(msg=result)) return self.node_results @property diff --git a/core/dbt/task/printer.py b/core/dbt/task/printer.py index 1e4d37878ab..953a967b4a2 100644 --- a/core/dbt/task/printer.py +++ b/core/dbt/task/printer.py @@ -1,27 +1,21 @@ from typing import Dict -from dbt.logger import ( - DbtStatusMessage, - TextOnly, -) -from dbt_common.events.functions import fire_event -from dbt_common.events.types import Formatting + +from dbt.artifacts.schemas.results import NodeStatus from dbt.events.types import ( - RunResultWarning, - RunResultWarningMessage, - RunResultFailure, - StatsLine, + CheckNodeTestFailure, + EndOfRunSummary, RunResultError, RunResultErrorNoMessage, + RunResultFailure, + RunResultWarning, + RunResultWarningMessage, SQLCompiledPath, - CheckNodeTestFailure, - EndOfRunSummary, + StatsLine, ) - -from dbt.tracking import InvocationProcessor -from dbt_common.events.format import pluralize - -from dbt.artifacts.schemas.results import NodeStatus from dbt.node_types import NodeType +from dbt_common.events.format import pluralize +from dbt_common.events.functions import fire_event +from dbt_common.events.types import Formatting def get_counts(flat_nodes) -> str: @@ -69,15 +63,13 @@ def print_run_status_line(results) -> None: stats[result_type] += 1 stats["total"] += 1 - with TextOnly(): - fire_event(Formatting("")) + fire_event(Formatting("")) fire_event(StatsLine(stats=stats)) def print_run_result_error(result, newline: bool = True, is_warning: bool = False) -> None: if newline: - with TextOnly(): - fire_event(Formatting("")) + fire_event(Formatting("")) # set node_info for logging events node_info = None @@ -112,13 +104,11 @@ def print_run_result_error(result, newline: bool = True, is_warning: bool = Fals fire_event(RunResultErrorNoMessage(status=result.status, node_info=node_info)) if result.node.compiled_path is not None: - with TextOnly(): - 
fire_event(Formatting("")) + fire_event(Formatting("")) fire_event(SQLCompiledPath(path=result.node.compiled_path, node_info=node_info)) if result.node.should_store_failures: - with TextOnly(): - fire_event(Formatting("")) + fire_event(Formatting("")) fire_event( CheckNodeTestFailure(relation_name=result.node.relation_name, node_info=node_info) ) @@ -139,21 +129,19 @@ def print_run_end_messages(results, keyboard_interrupt: bool = False) -> None: elif r.status == NodeStatus.Warn: warnings.append(r) - with DbtStatusMessage(), InvocationProcessor(): - with TextOnly(): - fire_event(Formatting("")) - fire_event( - EndOfRunSummary( - num_errors=len(errors), - num_warnings=len(warnings), - keyboard_interrupt=keyboard_interrupt, - ) + fire_event(Formatting("")) + fire_event( + EndOfRunSummary( + num_errors=len(errors), + num_warnings=len(warnings), + keyboard_interrupt=keyboard_interrupt, ) + ) - for error in errors: - print_run_result_error(error, is_warning=False) + for error in errors: + print_run_result_error(error, is_warning=False) - for warning in warnings: - print_run_result_error(warning, is_warning=True) + for warning in warnings: + print_run_result_error(warning, is_warning=True) - print_run_status_line(results) + print_run_status_line(results) diff --git a/core/dbt/task/retry.py b/core/dbt/task/retry.py index 57724f455e0..fd943b1151f 100644 --- a/core/dbt/task/retry.py +++ b/core/dbt/task/retry.py @@ -1,15 +1,16 @@ from pathlib import Path + from click import get_current_context from click.core import ParameterSource +from dbt.artifacts.schemas.results import NodeStatus from dbt.cli.flags import Flags -from dbt.flags import set_flags, get_flags from dbt.cli.types import Command as CliCommand from dbt.config import RuntimeConfig -from dbt.artifacts.schemas.results import NodeStatus from dbt.contracts.state import load_result_state -from dbt_common.exceptions import DbtRuntimeError +from dbt.flags import get_flags, set_flags from dbt.graph import GraphQueue +from 
dbt.parser.manifest import parse_manifest from dbt.task.base import ConfiguredTask from dbt.task.build import BuildTask from dbt.task.clone import CloneTask @@ -20,7 +21,7 @@ from dbt.task.seed import SeedTask from dbt.task.snapshot import SnapshotTask from dbt.task.test import TestTask -from dbt.parser.manifest import parse_manifest +from dbt_common.exceptions import DbtRuntimeError RETRYABLE_STATUSES = {NodeStatus.Error, NodeStatus.Fail, NodeStatus.Skipped, NodeStatus.RuntimeErr} IGNORE_PARENT_FLAGS = { diff --git a/core/dbt/task/run.py b/core/dbt/task/run.py index b57d39c785b..6263ee66b46 100644 --- a/core/dbt/task/run.py +++ b/core/dbt/task/run.py @@ -1,61 +1,50 @@ import functools import threading import time -from typing import List, Dict, Any, Iterable, Set, Tuple, Optional, AbstractSet - -from dbt_common.dataclass_schema import dbtClassMixin - -from .compile import CompileRunner, CompileTask - -from .printer import ( - print_run_end_messages, - get_counts, -) from datetime import datetime -from dbt import tracking -from dbt import utils +from typing import AbstractSet, Any, Dict, Iterable, List, Optional, Set, Tuple + +from dbt import tracking, utils from dbt.adapters.base import BaseRelation +from dbt.adapters.events.types import ( + DatabaseErrorRunningHook, + FinishedRunningStats, + HooksRunning, +) +from dbt.adapters.exceptions import MissingMaterializationError +from dbt.artifacts.resources import Hook +from dbt.artifacts.schemas.results import ( + BaseResult, + NodeStatus, + RunningStatus, + RunStatus, +) +from dbt.artifacts.schemas.run import RunResult from dbt.cli.flags import Flags from dbt.clients.jinja import MacroGenerator from dbt.config.runtime import RuntimeConfig from dbt.context.providers import generate_runtime_model_context -from dbt.contracts.graph.nodes import HookNode, ResultNode from dbt.contracts.graph.manifest import Manifest -from dbt.artifacts.schemas.results import NodeStatus, RunStatus, RunningStatus, BaseResult -from 
dbt.artifacts.schemas.run import RunResult -from dbt.artifacts.resources import Hook -from dbt.exceptions import ( - CompilationError, - DbtInternalError, - DbtRuntimeError, -) -from dbt_common.exceptions import DbtValidationError -from dbt.adapters.exceptions import MissingMaterializationError -from dbt.adapters.events.types import ( - DatabaseErrorRunningHook, - HooksRunning, - FinishedRunningStats, -) -from dbt_common.events.contextvars import log_contextvars -from dbt_common.events.functions import fire_event, get_invocation_id -from dbt_common.events.types import Formatting -from dbt_common.events.base_types import EventLevel +from dbt.contracts.graph.nodes import HookNode, ResultNode from dbt.events.types import ( - LogModelResult, - LogStartLine, LogHookEndLine, LogHookStartLine, + LogModelResult, + LogStartLine, ) -from dbt.logger import ( - TextOnly, - HookMetadata, - UniqueID, - TimestampNamed, - DbtModelState, -) +from dbt.exceptions import CompilationError, DbtInternalError, DbtRuntimeError from dbt.graph import ResourceTypeSelector from dbt.hooks import get_hook_dict from dbt.node_types import NodeType, RunHookType +from dbt_common.dataclass_schema import dbtClassMixin +from dbt_common.events.base_types import EventLevel +from dbt_common.events.contextvars import log_contextvars +from dbt_common.events.functions import fire_event, get_invocation_id +from dbt_common.events.types import Formatting +from dbt_common.exceptions import DbtValidationError + +from .compile import CompileRunner, CompileTask +from .printer import get_counts, print_run_end_messages class Timer: @@ -357,13 +346,9 @@ def run_hooks(self, adapter, hook_type: RunHookType, extra_context) -> None: return num_hooks = len(ordered_hooks) - with TextOnly(): - fire_event(Formatting("")) + fire_event(Formatting("")) fire_event(HooksRunning(num_hooks=num_hooks, hook_type=hook_type)) - startctx = TimestampNamed("node_started_at") - finishctx = TimestampNamed("node_finished_at") - for idx, hook 
in enumerate(ordered_hooks, start=1): # We want to include node_info in the appropriate log files, so use # log_contextvars @@ -374,47 +359,42 @@ def run_hooks(self, adapter, hook_type: RunHookType, extra_context) -> None: sql = self.get_hook_sql(adapter, hook, idx, num_hooks, extra_context) hook_text = "{}.{}.{}".format(hook.package_name, hook_type, hook.index) - hook_meta_ctx = HookMetadata(hook, self.index_offset(idx)) - with UniqueID(hook.unique_id): - with hook_meta_ctx, startctx: - fire_event( - LogHookStartLine( - statement=hook_text, - index=idx, - total=num_hooks, - node_info=hook.node_info, - ) - ) - - with Timer() as timer: - if len(sql.strip()) > 0: - response, _ = adapter.execute(sql, auto_begin=False, fetch=False) - status = response._message - else: - status = "OK" - - self.ran_hooks.append(hook) - hook.update_event_status(finished_at=datetime.utcnow().isoformat()) - with finishctx, DbtModelState({"node_status": "passed"}): - hook.update_event_status(node_status=RunStatus.Success) - fire_event( - LogHookEndLine( - statement=hook_text, - status=status, - index=idx, - total=num_hooks, - execution_time=timer.elapsed, - node_info=hook.node_info, - ) - ) + fire_event( + LogHookStartLine( + statement=hook_text, + index=idx, + total=num_hooks, + node_info=hook.node_info, + ) + ) + + with Timer() as timer: + if len(sql.strip()) > 0: + response, _ = adapter.execute(sql, auto_begin=False, fetch=False) + status = response._message + else: + status = "OK" + + self.ran_hooks.append(hook) + hook.update_event_status(finished_at=datetime.utcnow().isoformat()) + hook.update_event_status(node_status=RunStatus.Success) + fire_event( + LogHookEndLine( + statement=hook_text, + status=status, + index=idx, + total=num_hooks, + execution_time=timer.elapsed, + node_info=hook.node_info, + ) + ) # `_event_status` dict is only used for logging. 
Make sure # it gets deleted when we're done with it hook.clear_event_status() self._total_executed += len(ordered_hooks) - with TextOnly(): - fire_event(Formatting("")) + fire_event(Formatting("")) def safe_run_hooks( self, adapter, hook_type: RunHookType, extra_context: Dict[str, Any] @@ -444,8 +424,7 @@ def print_results_line(self, results, execution_time) -> None: if execution_time is not None: execution = utils.humanize_execution_time(execution_time=execution_time) - with TextOnly(): - fire_event(Formatting("")) + fire_event(Formatting("")) fire_event( FinishedRunningStats( stat_line=stat_line, execution=execution, execution_time=execution_time @@ -454,10 +433,10 @@ def print_results_line(self, results, execution_time) -> None: def before_run(self, adapter, selected_uids: AbstractSet[str]) -> None: with adapter.connection_named("master"): + self.defer_to_manifest() required_schemas = self.get_model_schemas(adapter, selected_uids) self.create_schemas(adapter, required_schemas) self.populate_adapter_cache(adapter, required_schemas) - self.defer_to_manifest(adapter, selected_uids) self.safe_run_hooks(adapter, RunHookType.Start, {}) def after_run(self, adapter, results) -> None: diff --git a/core/dbt/task/run_operation.py b/core/dbt/task/run_operation.py index 1c6c5002e27..6f7cd7b64c0 100644 --- a/core/dbt/task/run_operation.py +++ b/core/dbt/task/run_operation.py @@ -6,19 +6,19 @@ import dbt_common.exceptions from dbt.adapters.factory import get_adapter +from dbt.artifacts.schemas.results import RunStatus, TimingInfo +from dbt.artifacts.schemas.run import RunResult, RunResultsArtifact from dbt.contracts.files import FileHash from dbt.contracts.graph.nodes import HookNode -from dbt.artifacts.schemas.results import RunStatus, TimingInfo -from dbt.artifacts.schemas.run import RunResultsArtifact, RunResult -from dbt_common.events.functions import fire_event from dbt.events.types import ( LogDebugStackTrace, RunningOperationCaughtError, RunningOperationUncaughtError, ) 
-from dbt_common.exceptions import DbtInternalError from dbt.node_types import NodeType from dbt.task.base import ConfiguredTask +from dbt_common.events.functions import fire_event +from dbt_common.exceptions import DbtInternalError RESULT_FILE_NAME = "run_results.json" diff --git a/core/dbt/task/runnable.py b/core/dbt/task/runnable.py index 6593053c285..a01e7a06c22 100644 --- a/core/dbt/task/runnable.py +++ b/core/dbt/task/runnable.py @@ -5,63 +5,63 @@ from datetime import datetime from multiprocessing.dummy import Pool as ThreadPool from pathlib import Path -from typing import AbstractSet, Optional, Dict, List, Set, Tuple, Iterable - -from dbt_common.context import get_invocation_context, _INVOCATION_CONTEXT_VAR -import dbt_common.utils.formatting +from typing import AbstractSet, Dict, Iterable, List, Optional, Set, Tuple import dbt.exceptions import dbt.tracking import dbt.utils +import dbt_common.utils.formatting from dbt.adapters.base import BaseRelation from dbt.adapters.factory import get_adapter +from dbt.artifacts.schemas.results import ( + BaseResult, + NodeStatus, + RunningStatus, + RunStatus, +) +from dbt.artifacts.schemas.run import RunExecutionResult, RunResult from dbt.cli.flags import Flags from dbt.config.runtime import RuntimeConfig from dbt.contracts.graph.manifest import Manifest from dbt.contracts.graph.nodes import ResultNode -from dbt.artifacts.schemas.results import NodeStatus, RunningStatus, RunStatus, BaseResult -from dbt.artifacts.schemas.run import RunExecutionResult, RunResult from dbt.contracts.state import PreviousState -from dbt_common.events.contextvars import log_contextvars, task_contextvars -from dbt_common.events.functions import fire_event, warn_or_error -from dbt_common.events.types import Formatting from dbt.events.types import ( - LogCancelLine, - DefaultSelector, - NodeStart, - NodeFinished, - QueryCancelationUnsupported, ConcurrencyLine, + DefaultSelector, EndRunResult, - NothingToDo, GenericExceptionOnRun, + 
LogCancelLine, + NodeFinished, + NodeStart, + NothingToDo, + QueryCancelationUnsupported, ) -from dbt.exceptions import ( - DbtInternalError, - DbtRuntimeError, - FailFastError, -) -from dbt_common.exceptions import NotImplementedError +from dbt.exceptions import DbtInternalError, DbtRuntimeError, FailFastError from dbt.flags import get_flags -from dbt.graph import GraphQueue, NodeSelector, SelectionSpec, parse_difference, UniqueId -from dbt.logger import ( - DbtProcessState, - TextOnly, - UniqueID, - TimestampNamed, - DbtModelState, - ModelMetadata, - NodeCount, +from dbt.graph import ( + GraphQueue, + NodeSelector, + SelectionSpec, + UniqueId, + parse_difference, ) from dbt.parser.manifest import write_manifest -from dbt.task.base import ConfiguredTask, BaseRunner -from .printer import ( - print_run_result_error, - print_run_end_messages, -) +from dbt.task.base import BaseRunner, ConfiguredTask +from dbt_common.context import _INVOCATION_CONTEXT_VAR, get_invocation_context +from dbt_common.dataclass_schema import StrEnum +from dbt_common.events.contextvars import log_contextvars, task_contextvars +from dbt_common.events.functions import fire_event, warn_or_error +from dbt_common.events.types import Formatting +from dbt_common.exceptions import NotImplementedError + +from .printer import print_run_end_messages, print_run_result_error RESULT_FILE_NAME = "run_results.json" -RUNNING_STATE = DbtProcessState("running") + + +class GraphRunnableMode(StrEnum): + Topological = "topological" + Independent = "independent" class GraphRunnableTask(ConfiguredTask): @@ -108,13 +108,6 @@ def exclusion_arg(self): def get_selection_spec(self) -> SelectionSpec: default_selector_name = self.config.get_default_selector_name() - # TODO: The "eager" string below needs to be replaced with programatic access - # to the default value for the indirect selection parameter in - # dbt.cli.params.indirect_selection - # - # Doing that is actually a little tricky, so I'm punting it to a new 
ticket GH #6397 - indirect_selection = getattr(self.args, "INDIRECT_SELECTION", "eager") - if self.args.selector: # use pre-defined selector (--selector) spec = self.config.get_selector(self.args.selector) @@ -125,7 +118,7 @@ def get_selection_spec(self) -> SelectionSpec: else: # This is what's used with no default selector and no selection # use --select and --exclude args - spec = parse_difference(self.selection_arg, self.exclusion_arg, indirect_selection) + spec = parse_difference(self.selection_arg, self.exclusion_arg) # mypy complains because the return values of get_selector and parse_difference # are different return spec # type: ignore @@ -134,7 +127,7 @@ def get_selection_spec(self) -> SelectionSpec: def get_node_selector(self) -> NodeSelector: raise NotImplementedError(f"get_node_selector not implemented for task {type(self)}") - def defer_to_manifest(self, adapter, selected_uids: AbstractSet[str]): + def defer_to_manifest(self): deferred_manifest = self._get_deferred_manifest() if deferred_manifest is None: return @@ -142,22 +135,21 @@ def defer_to_manifest(self, adapter, selected_uids: AbstractSet[str]): raise DbtInternalError( "Expected to defer to manifest, but there is no runtime manifest to defer from!" ) - self.manifest.merge_from_artifact( - adapter=adapter, - other=deferred_manifest, - selected=selected_uids, - favor_state=bool(self.args.favor_state), - ) - # We're rewriting the manifest because it's been mutated during merge_from_artifact. - # This is to reflect which nodes had been deferred to (= replaced with) their counterparts. 
- if self.args.write_json: - write_manifest(self.manifest, self.config.project_target_path) + self.manifest.merge_from_artifact(other=deferred_manifest) def get_graph_queue(self) -> GraphQueue: selector = self.get_node_selector() # Following uses self.selection_arg and self.exclusion_arg spec = self.get_selection_spec() - return selector.get_graph_queue(spec) + + preserve_edges = True + if self.get_run_mode() == GraphRunnableMode.Independent: + preserve_edges = False + + return selector.get_graph_queue(spec, preserve_edges) + + def get_run_mode(self) -> GraphRunnableMode: + return GraphRunnableMode.Topological def _runtime_initialize(self): self.compile_manifest() @@ -210,59 +202,48 @@ def get_runner(self, node) -> BaseRunner: return cls(self.config, adapter, node, run_count, num_nodes) def call_runner(self, runner: BaseRunner) -> RunResult: - uid_context = UniqueID(runner.node.unique_id) - with RUNNING_STATE, uid_context, log_contextvars(node_info=runner.node.node_info): - startctx = TimestampNamed("node_started_at") - index = self.index_offset(runner.node_index) + with log_contextvars(node_info=runner.node.node_info): runner.node.update_event_status( started_at=datetime.utcnow().isoformat(), node_status=RunningStatus.Started ) - extended_metadata = ModelMetadata(runner.node, index) - - with startctx, extended_metadata: - fire_event( - NodeStart( - node_info=runner.node.node_info, - ) + fire_event( + NodeStart( + node_info=runner.node.node_info, ) - status: Dict[str, str] = {} - result = None - thread_exception = None + ) try: result = runner.run_with_hooks(self.manifest) except Exception as e: thread_exception = e finally: - finishctx = TimestampNamed("finished_at") - with finishctx, DbtModelState(status): - if result is not None: - fire_event( - NodeFinished( - node_info=runner.node.node_info, - run_result=result.to_msg_dict(), - ) - ) - else: - msg = f"Exception on worker thread. 
{thread_exception}" - - fire_event( - GenericExceptionOnRun( - unique_id=runner.node.unique_id, - exc=str(thread_exception), - node_info=runner.node.node_info, - ) + if result is not None: + fire_event( + NodeFinished( + node_info=runner.node.node_info, + run_result=result.to_msg_dict(), ) + ) + else: + msg = f"Exception on worker thread. {thread_exception}" - result = RunResult( - status=RunStatus.Error, # type: ignore - timing=[], - thread_id="", - execution_time=0.0, - adapter_response={}, - message=msg, - failures=None, - node=runner.node, + fire_event( + GenericExceptionOnRun( + unique_id=runner.node.unique_id, + exc=str(thread_exception), + node_info=runner.node.node_info, ) + ) + + result = RunResult( + status=RunStatus.Error, # type: ignore + timing=[], + thread_id="", + execution_time=0.0, + adapter_response={}, + message=msg, + failures=None, + node=runner.node, + ) # `_event_status` dict is only used for logging. Make sure # it gets deleted when we're done with it @@ -398,15 +379,12 @@ def execute_nodes(self): num_threads = self.config.threads target_name = self.config.target_name - # following line can be removed when legacy logger is removed - with NodeCount(self.num_nodes): - fire_event( - ConcurrencyLine( - num_threads=num_threads, target_name=target_name, node_count=self.num_nodes - ) + fire_event( + ConcurrencyLine( + num_threads=num_threads, target_name=target_name, node_count=self.num_nodes ) - with TextOnly(): - fire_event(Formatting("")) + ) + fire_event(Formatting("")) pool = ThreadPool(num_threads, self._pool_thread_initializer, [get_invocation_context()]) try: @@ -486,8 +464,8 @@ def populate_adapter_cache( def before_run(self, adapter, selected_uids: AbstractSet[str]): with adapter.connection_named("master"): + self.defer_to_manifest() self.populate_adapter_cache(adapter) - self.defer_to_manifest(adapter, selected_uids) def after_run(self, adapter, results): pass @@ -527,8 +505,7 @@ def run(self): ) if len(self._flattened_nodes) == 0: - 
with TextOnly(): - fire_event(Formatting("")) + fire_event(Formatting("")) warn_or_error(NothingToDo()) result = self.get_result( results=[], @@ -536,8 +513,7 @@ def run(self): elapsed_time=0.0, ) else: - with TextOnly(): - fire_event(Formatting("")) + fire_event(Formatting("")) selected_uids = frozenset(n.unique_id for n in self._flattened_nodes) result = self.execute_with_hooks(selected_uids) diff --git a/core/dbt/task/seed.py b/core/dbt/task/seed.py index ce8ba0f35b9..031bcdf7feb 100644 --- a/core/dbt/task/seed.py +++ b/core/dbt/task/seed.py @@ -1,23 +1,16 @@ import random -from .run import ModelRunner, RunTask -from .printer import ( - print_run_end_messages, -) - -from dbt.artifacts.schemas.results import RunStatus, NodeStatus -from dbt_common.exceptions import DbtInternalError +from dbt.artifacts.schemas.results import NodeStatus, RunStatus +from dbt.events.types import LogSeedResult, LogStartLine, SeedHeader from dbt.graph import ResourceTypeSelector -from dbt.logger import TextOnly +from dbt.node_types import NodeType +from dbt_common.events.base_types import EventLevel from dbt_common.events.functions import fire_event from dbt_common.events.types import Formatting -from dbt_common.events.base_types import EventLevel -from dbt.events.types import ( - SeedHeader, - LogSeedResult, - LogStartLine, -) -from dbt.node_types import NodeType +from dbt_common.exceptions import DbtInternalError + +from .printer import print_run_end_messages +from .run import ModelRunner, RunTask class SeedRunner(ModelRunner): @@ -92,14 +85,12 @@ def show_table(self, result): alias = result.node.alias header = "Random sample of table: {}.{}".format(schema, alias) - with TextOnly(): - fire_event(Formatting("")) + fire_event(Formatting("")) fire_event(SeedHeader(header=header)) fire_event(Formatting("-" * len(header))) rand_table.print_table(max_rows=10, max_columns=None) - with TextOnly(): - fire_event(Formatting("")) + fire_event(Formatting("")) def show_tables(self, results): for 
result in results: diff --git a/core/dbt/task/show.py b/core/dbt/task/show.py index 608ba6fdebd..a7a367c279b 100644 --- a/core/dbt/task/show.py +++ b/core/dbt/task/show.py @@ -2,16 +2,16 @@ import threading import time +from dbt.artifacts.schemas.run import RunResult, RunStatus from dbt.context.providers import generate_runtime_model_context from dbt.contracts.graph.nodes import SeedNode -from dbt.artifacts.schemas.run import RunResult, RunStatus +from dbt.events.types import ShowNode +from dbt.task.compile import CompileRunner, CompileTask +from dbt.task.seed import SeedRunner from dbt_common.events.base_types import EventLevel from dbt_common.events.functions import fire_event from dbt_common.events.types import Note -from dbt.events.types import ShowNode from dbt_common.exceptions import DbtRuntimeError -from dbt.task.compile import CompileTask, CompileRunner -from dbt.task.seed import SeedRunner class ShowRunner(CompileRunner): diff --git a/core/dbt/task/snapshot.py b/core/dbt/task/snapshot.py index 728ea49851c..5604d9cc546 100644 --- a/core/dbt/task/snapshot.py +++ b/core/dbt/task/snapshot.py @@ -1,14 +1,14 @@ -from .run import ModelRunner, RunTask - -from dbt_common.exceptions import DbtInternalError -from dbt_common.events.functions import fire_event -from dbt_common.events.base_types import EventLevel +from dbt.artifacts.schemas.results import NodeStatus from dbt.events.types import LogSnapshotResult from dbt.graph import ResourceTypeSelector from dbt.node_types import NodeType -from dbt.artifacts.schemas.results import NodeStatus +from dbt_common.events.base_types import EventLevel +from dbt_common.events.functions import fire_event +from dbt_common.exceptions import DbtInternalError from dbt_common.utils import cast_dict_to_dict_of_strings +from .run import ModelRunner, RunTask + class SnapshotRunner(ModelRunner): def describe_node(self): diff --git a/core/dbt/task/sql.py b/core/dbt/task/sql.py index 6a9522133d0..2fa6f8ccb8c 100644 --- 
a/core/dbt/task/sql.py +++ b/core/dbt/task/sql.py @@ -1,20 +1,19 @@ +import traceback from abc import abstractmethod from datetime import datetime from typing import Generic, TypeVar -import traceback -import dbt_common.exceptions.base import dbt.exceptions +import dbt_common.exceptions.base from dbt.contracts.sql import ( RemoteCompileResult, RemoteCompileResultMixin, RemoteRunResult, ResultTable, ) -from dbt_common.events.functions import fire_event from dbt.events.types import SQLRunnerException from dbt.task.compile import CompileRunner - +from dbt_common.events.functions import fire_event SQLResult = TypeVar("SQLResult", bound=RemoteCompileResultMixin) @@ -67,7 +66,6 @@ def execute(self, compiled_node, manifest) -> RemoteCompileResult: compiled_code=compiled_node.compiled_code, node=compiled_node, timing=[], # this will get added later - logs=[], generated_at=datetime.utcnow(), ) @@ -77,7 +75,6 @@ def from_run_result(self, result, start_time, timing_info) -> RemoteCompileResul compiled_code=result.compiled_code, node=result.node, timing=timing_info, - logs=[], generated_at=datetime.utcnow(), ) @@ -97,7 +94,6 @@ def execute(self, compiled_node, manifest) -> RemoteRunResult: node=compiled_node, table=table, timing=[], - logs=[], generated_at=datetime.utcnow(), ) @@ -108,6 +104,5 @@ def from_run_result(self, result, start_time, timing_info) -> RemoteRunResult: node=result.node, table=result.table, timing=timing_info, - logs=[], generated_at=datetime.utcnow(), ) diff --git a/core/dbt/task/test.py b/core/dbt/task/test.py index 8a82c7f1243..2ae65dc3ebe 100644 --- a/core/dbt/task/test.py +++ b/core/dbt/task/test.py @@ -1,46 +1,41 @@ -import daff import io import json import re -from dataclasses import dataclass -from dbt.utils import _coerce_decimal, strtobool -from dbt_common.events.format import pluralize -from dbt_common.dataclass_schema import dbtClassMixin import threading -from typing import Dict, Any, Optional, Union, List, TYPE_CHECKING, Tuple +from 
dataclasses import dataclass +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union -from .compile import CompileRunner -from .run import RunTask +import daff +from dbt.adapters.exceptions import MissingMaterializationError +from dbt.artifacts.schemas.catalog import PrimitiveDict +from dbt.artifacts.schemas.results import TestStatus +from dbt.artifacts.schemas.run import RunResult +from dbt.clients.jinja import MacroGenerator +from dbt.context.providers import generate_runtime_model_context +from dbt.contracts.graph.manifest import Manifest from dbt.contracts.graph.nodes import ( + GenericTestNode, + SingularTestNode, TestNode, UnitTestDefinition, UnitTestNode, - GenericTestNode, - SingularTestNode, -) -from dbt.contracts.graph.manifest import Manifest -from dbt.artifacts.schemas.results import TestStatus -from dbt.artifacts.schemas.run import RunResult -from dbt.artifacts.schemas.catalog import PrimitiveDict -from dbt.context.providers import generate_runtime_model_context -from dbt.clients.jinja import MacroGenerator -from dbt_common.events.functions import fire_event -from dbt.events.types import ( - LogTestResult, - LogStartLine, -) -from dbt.exceptions import DbtInternalError, BooleanError -from dbt_common.exceptions import DbtBaseException, DbtRuntimeError -from dbt.adapters.exceptions import MissingMaterializationError -from dbt.graph import ( - ResourceTypeSelector, ) +from dbt.events.types import LogStartLine, LogTestResult +from dbt.exceptions import BooleanError, DbtInternalError +from dbt.flags import get_flags +from dbt.graph import ResourceTypeSelector from dbt.node_types import NodeType from dbt.parser.unit_tests import UnitTestManifestLoader -from dbt.flags import get_flags +from dbt.utils import _coerce_decimal, strtobool +from dbt_common.dataclass_schema import dbtClassMixin +from dbt_common.events.format import pluralize +from dbt_common.events.functions import fire_event +from dbt_common.exceptions import DbtBaseException, 
DbtRuntimeError from dbt_common.ui import green, red +from .compile import CompileRunner +from .run import RunTask if TYPE_CHECKING: import agate @@ -343,16 +338,17 @@ def _get_unit_test_agate_table(self, result_table, actual_or_expected: str): def _get_daff_diff( self, expected: "agate.Table", actual: "agate.Table", ordered: bool = False ) -> daff.TableDiff: - - expected_daff_table = daff.PythonTableView(list_rows_from_table(expected)) - actual_daff_table = daff.PythonTableView(list_rows_from_table(actual)) - - alignment = daff.Coopy.compareTables(expected_daff_table, actual_daff_table).align() - result = daff.PythonTableView([]) + # Sort expected and actual inputs prior to creating daff diff to ensure order insensitivity + # https://github.com/paulfitz/daff/issues/200 + expected_daff_table = daff.PythonTableView(list_rows_from_table(expected, sort=True)) + actual_daff_table = daff.PythonTableView(list_rows_from_table(actual, sort=True)) flags = daff.CompareFlags() flags.ordered = ordered + alignment = daff.Coopy.compareTables(expected_daff_table, actual_daff_table, flags).align() + result = daff.PythonTableView([]) + diff = daff.TableDiff(alignment, flags) diff.hilite(result) return diff @@ -413,10 +409,25 @@ def json_rows_from_table(table: "agate.Table") -> List[Dict[str, Any]]: # This was originally in agate_helper, but that was moved out into dbt_common -def list_rows_from_table(table: "agate.Table") -> List[Any]: - "Convert a table to a list of lists, where the first element represents the header" - rows = [[col.name for col in table.columns]] +def list_rows_from_table(table: "agate.Table", sort: bool = False) -> List[Any]: + """ + Convert given table to a list of lists, where the first element represents the header + + By default, sort is False and no sort order is applied to the non-header rows of the given table. + + If sort is True, sort the non-header rows hierarchically, treating None values as lower in order. 
+ Examples: + * [['a','b','c'],[4,5,6],[1,2,3]] -> [['a','b','c'],[1,2,3],[4,5,6]] + * [['a','b','c'],[4,5,6],[1,null,3]] -> [['a','b','c'],[1,null,3],[4,5,6]] + * [['a','b','c'],[4,5,6],[null,2,3]] -> [['a','b','c'],[4,5,6],[null,2,3]] + """ + header = [col.name for col in table.columns] + + rows = [] for row in table.rows: rows.append(list(row.values())) - return rows + if sort: + rows = sorted(rows, key=lambda x: [(elem is None, elem) for elem in x]) + + return [header] + rows diff --git a/core/dbt/tests/fixtures/project.py b/core/dbt/tests/fixtures/project.py index 252f11116c1..5395247a74c 100644 --- a/core/dbt/tests/fixtures/project.py +++ b/core/dbt/tests/fixtures/project.py @@ -1,32 +1,35 @@ import os +import random +from argparse import Namespace +from datetime import datetime from pathlib import Path from typing import Mapping import pytest # type: ignore -import random -from argparse import Namespace -from datetime import datetime -import warnings import yaml -from dbt.mp_context import get_mp_context -from dbt.parser.manifest import ManifestLoader -from dbt_common.context import set_invocation_context -from dbt_common.exceptions import CompilationError, DbtDatabaseError -from dbt.context.providers import generate_runtime_macro_context import dbt.flags as flags -from dbt_common.tests import enable_test_caching +from dbt.adapters.factory import ( + get_adapter, + get_adapter_by_type, + register_adapter, + reset_adapters, +) from dbt.config.runtime import RuntimeConfig -from dbt.adapters.factory import get_adapter, register_adapter, reset_adapters, get_adapter_by_type -from dbt_common.events.event_manager_client import cleanup_event_logger +from dbt.context.providers import generate_runtime_macro_context from dbt.events.logging import setup_event_logger +from dbt.mp_context import get_mp_context +from dbt.parser.manifest import ManifestLoader from dbt.tests.util import ( - write_file, - run_sql_with_adapter, TestProcessingException, get_connection, + 
run_sql_with_adapter, + write_file, ) - +from dbt_common.context import set_invocation_context +from dbt_common.events.event_manager_client import cleanup_event_logger +from dbt_common.exceptions import CompilationError, DbtDatabaseError +from dbt_common.tests import enable_test_caching # These are the fixtures that are used in dbt core functional tests # @@ -536,9 +539,6 @@ def project( logs_dir, test_config, ): - # Logbook warnings are ignored so we don't have to fork logbook to support python 3.10. - # This _only_ works for tests in `tests/` that use the project fixture. - warnings.filterwarnings("ignore", category=DeprecationWarning, module="logbook") log_flags = Namespace( LOG_PATH=logs_dir, LOG_FORMAT="json", diff --git a/core/dbt/tests/util.py b/core/dbt/tests/util.py index 437f25aa2e2..d5c5e49d2aa 100644 --- a/core/dbt/tests/util.py +++ b/core/dbt/tests/util.py @@ -1,28 +1,27 @@ -from contextvars import ContextVar, copy_context -from io import StringIO +import json import os import shutil -import yaml -import json -import warnings +from contextlib import contextmanager +from contextvars import ContextVar, copy_context from datetime import datetime +from io import StringIO from typing import Any, Dict, List, Optional -from contextlib import contextmanager -from dbt.adapters.factory import Adapter +import yaml + +from dbt.adapters.base.relation import BaseRelation +from dbt.adapters.factory import Adapter from dbt.cli.main import dbtRunner -from dbt.logger import log_manager from dbt.contracts.graph.manifest import Manifest from dbt_common.context import _INVOCATION_CONTEXT_VAR, InvocationContext +from dbt_common.events.base_types import EventLevel from dbt_common.events.functions import ( - fire_event, capture_stdout_logs, - stop_capture_stdout_logs, + fire_event, reset_metadata_vars, + stop_capture_stdout_logs, ) -from dbt_common.events.base_types import EventLevel from dbt_common.events.types import Note -from dbt.adapters.base.relation import BaseRelation 
# ============================================================================= # Test utilities @@ -75,15 +74,9 @@ def run_dbt( args: Optional[List[str]] = None, expect_pass: bool = True, ): - # Ignore logbook warnings - warnings.filterwarnings("ignore", category=DeprecationWarning, module="logbook") - # reset global vars reset_metadata_vars() - # The logger will complain about already being initialized if - # we don't do this. - log_manager.reset_handlers() if args is None: args = ["run"] diff --git a/core/dbt/tracking.py b/core/dbt/tracking.py index ff7b9e7ebb9..880243e4d6e 100644 --- a/core/dbt/tracking.py +++ b/core/dbt/tracking.py @@ -6,7 +6,6 @@ from datetime import datetime from typing import Optional -import logbook import pytz import requests from packaging.version import Version @@ -15,8 +14,8 @@ from snowplow_tracker import logger as sp_logger from dbt import version as dbt_version +from dbt.adapters.exceptions import FailedToConnectError from dbt.clients.yaml_helper import safe_load, yaml # noqa:F401 -from dbt_common.events.functions import fire_event, get_invocation_id from dbt.events.types import ( DisableTracking, FlushEvents, @@ -26,7 +25,7 @@ SendingEvent, TrackingInitializeFailure, ) -from dbt.adapters.exceptions import FailedToConnectError +from dbt_common.events.functions import fire_event, get_invocation_id from dbt_common.exceptions import NotImplementedError sp_logger.setLevel(100) @@ -467,20 +466,6 @@ def do_not_track(): active_user = User(None) -class InvocationProcessor(logbook.Processor): - def __init__(self) -> None: - super().__init__() - - def process(self, record): - if active_user is not None: - record.extra.update( - { - "run_started_at": active_user.run_started_at.isoformat(), - "invocation_id": get_invocation_id(), - } - ) - - def initialize_from_flags(send_anonymous_usage_stats, profiles_dir): global active_user if send_anonymous_usage_stats: diff --git a/core/dbt/utils.py b/core/dbt/utils.py index 8f7509a5dec..05416d43344 
100644 --- a/core/dbt/utils.py +++ b/core/dbt/utils.py @@ -3,34 +3,34 @@ import decimal import functools import itertools -import jinja2 import json import os -from pathlib import PosixPath, WindowsPath - -from dbt_common.utils import md5 -from dbt_common.exceptions import ( - RecursionError, -) -from dbt.exceptions import DuplicateAliasError -from dbt_common.helper_types import WarnErrorOptions -from dbt import flags +import sys from enum import Enum +from pathlib import PosixPath, WindowsPath from typing import ( - Tuple, - Type, + AbstractSet, Any, - Optional, Dict, - List, + Iterable, Iterator, + List, Mapping, - Iterable, - AbstractSet, - Set, + Optional, Sequence, + Set, + Tuple, + Type, ) +import jinja2 + +from dbt import flags +from dbt.exceptions import DuplicateAliasError +from dbt_common.exceptions import RecursionError +from dbt_common.helper_types import WarnErrorOptions +from dbt_common.utils import md5 + DECIMALS: Tuple[Type[Any], ...] try: import cdecimal # typing: ignore @@ -387,3 +387,22 @@ def strtobool(val: str) -> bool: return False else: raise ValueError("invalid truth value %r" % (val,)) + + +def try_get_max_rss_kb() -> Optional[int]: + """Attempts to get the high water mark for this process's memory use via + the most reliable and accurate mechanism available through the host OS. + Currently only implemented for Linux.""" + if sys.platform == "linux" and os.path.isfile("/proc/self/status"): + try: + # On Linux, the most reliable documented mechanism for getting the RSS + # high-water-mark comes from the line confusingly labeled VmHWM in the + # /proc/self/status virtual file. 
+ with open("/proc/self/status") as f: + for line in f: + if line.startswith("VmHWM:"): + return int(str.split(line)[1]) + except Exception: + pass + + return None diff --git a/core/dbt/version.py b/core/dbt/version.py index 5d515185ae6..a4a219e9529 100644 --- a/core/dbt/version.py +++ b/core/dbt/version.py @@ -3,12 +3,11 @@ import importlib.util import json import os -import requests - from typing import Iterator, List, Optional, Tuple -import dbt_common.semver as semver +import requests +import dbt_common.semver as semver from dbt_common.ui import green, red, yellow PYPI_VERSION_URL = "https://pypi.org/pypi/dbt-core/json" @@ -229,5 +228,5 @@ def _get_adapter_plugin_names() -> Iterator[str]: yield plugin_name -__version__ = "1.8.0b3" +__version__ = "1.9.0a1" installed = get_installed_version() diff --git a/core/setup.py b/core/setup.py index 1f103c19534..e133b946fa4 100644 --- a/core/setup.py +++ b/core/setup.py @@ -25,7 +25,7 @@ package_name = "dbt-core" -package_version = "1.8.0b3" +package_version = "1.9.0a1" description = """With dbt, data analysts and engineers can build analytics \ the way engineers build applications.""" @@ -49,14 +49,10 @@ # ---- # dbt-core uses these packages deeply, throughout the codebase, and there have been breaking changes in past patch releases (even though these are major-version-one). # Pin to the patch or minor version, and bump in each new minor version of dbt-core. - "agate>=1.7.0,<1.8", + "agate>=1.7.0,<1.10", "Jinja2>=3.1.3,<4", "mashumaro[msgpack]>=3.9,<4.0", # ---- - # Legacy: This package has not been updated since 2019, and it is unused in dbt's logging system (since v1.0) - # The dependency here will be removed along with the removal of 'legacy logging', in a future release of dbt-core - "logbook>=1.5,<1.6", - # ---- # dbt-core uses these packages in standard ways. Pin to the major version, and check compatibility # with major versions in each new minor version of dbt-core. 
"click>=8.0.2,<9.0", @@ -75,8 +71,8 @@ "minimal-snowplow-tracker>=0.0.2,<0.1", "dbt-semantic-interfaces>=0.5.1,<0.6", # Minor versions for these are expected to be backwards-compatible - "dbt-common>=1.0.1,<2.0", - "dbt-adapters>=0.1.0a2,<2.0", + "dbt-common>=1.1.0,<2.0", + "dbt-adapters>=1.1.1,<2.0", # ---- # Expect compatibility with all new versions of these packages, so lower bounds only. "packaging>20.9", diff --git a/dev-requirements.txt b/dev-requirements.txt index da0ee332952..52b599008ea 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,4 +1,4 @@ -git+https://github.com/dbt-labs/dbt-adapters.git +git+https://github.com/dbt-labs/dbt-adapters.git@main git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-tests-adapter git+https://github.com/dbt-labs/dbt-common.git@main git+https://github.com/dbt-labs/dbt-postgres.git@main @@ -11,6 +11,7 @@ flaky freezegun>=1.4.0,<1.5 hypothesis ipdb +isort>=5.12,<6 mypy==1.4.1 pip-tools pre-commit @@ -18,20 +19,17 @@ pytest>=7.4,<8.0 pytest-cov pytest-csv>=3.0,<4.0 pytest-dotenv -pytest-logbook pytest-mock pytest-split pytest-xdist sphinx tox>=3.13 -twine types-docutils types-PyYAML types-Jinja2 types-mock -types-protobuf +types-protobuf>=4.0.0,<5.0.0 types-pytz types-requests types-setuptools -wheel mocker diff --git a/docs/guides/behavior-change-flags.md b/docs/guides/behavior-change-flags.md new file mode 100644 index 00000000000..ad8b0e4fb93 --- /dev/null +++ b/docs/guides/behavior-change-flags.md @@ -0,0 +1,32 @@ +# Playbook: Behavior Change Flags + +User documentation: https://docs.getdbt.com/reference/global-configs/legacy-behaviors + +## Rules for introducing a new flag + +1. **Naming.** All behavior change flags should be named so that their default value changes from **False → True**. This makes it significantly easier for us to document them and talk about them consistently, and it's more intuitive for end users. 
+ * (a) If the flag is prohibiting something that we previously allowed, use the verb "require." Examples: + * `require_resource_names_without_spaces` + * `require_explicit_package_overrides_for_builtin_materializations` + * (b) All flags should be of boolean type, and False by default when introduced: `bool = False`. +2. **Documentation.** Start with the docs. What is the change? Who might be affected? What action will users need to take to mitigate this change? At this point, the dates for flag Introduction + Maturity are "TBD." +3. **Deprecation warnings**. As a general rule, **all** behavior changes should be accompanied by a deprecation warning. + * (a) Always use our standard deprecations module: [https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/deprecations.py](https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/deprecations.py) + * (b) This serves two purposes: Signalling the change to the user, and collecting telemetry so we can understand blast radius among users with telemetry enabled. + * (c) These warning messages should link back to documentation: [https://docs.getdbt.com/reference/global-configs/legacy-behaviors](https://docs.getdbt.com/reference/global-configs/legacy-behaviors#deprecate_package_materialization_builtin_override) + * (d) Even for additive behaviors that are not "breaking changes," there is still an opportunity to signal these changes for users, and to gather an estimate of the impact. E.g. `source_freshness_run_project_hooks` should still include a proactive message any time someone runs the `source freshness` command in a project that has `on-run-*` hooks defined. + * (e) The call site for these deprecation warnings should be as close as possible to the place where we’re evaluating conditional logic based on the project flag. Essentially, any time we check the flag value and it returns `False`, we should raise a deprecation warning while preserving the legacy behavior.
(In the future, we might be able to streamline more of this boilerplate code.) + * (f) If users want to silence these deprecation warnings, they can do so via [`warn_error_options.silence`](https://docs.getdbt.com/reference/global-configs/warnings). Explicitly setting the flag to `False` in `dbt_project.yml` is not sufficient to silence the warning. +4. **Exceptions.** If the behavior change is to raise an exception that prohibits behavior which was previously permitted (e.g. spaces in model names), the exception message should also link to the docs on legacy behaviors. +5. **Backports.** Whenever possible, we should backport both the deprecation warning and the flag to the previous version of dbt Core. +6. **Open a GitHub issue** in the dbt-core repository that is the implementation ticket for switching the default from `false` to `true`. Add the `behavior_change_flag` issue label, and add it to the GitHub milestone for the next minor version. (This is true in most cases, see below for exceptional considerations.) During planning, we will bundle up the "introduced" behavior changes into an epic/tasklist that schedules their maturation. + +## After introduction + +1. **Maturing flag(s) by switching value from `False` → `True` in dbt-core `main`.** + * (a) This should land in **the next minor (`1.X.0`) release of dbt-core**. + * (b) If the behavior change is mitigating a security vulnerability, and the next minor release is still planned for several months away, we still backport the fix + flag (off by default) to supported OSS versions, and we strongly advise all users to opt into the flag sooner. +2. **Removing support for legacy behaviors.** + * (a) As a general rule, we will not entirely remove support for any legacy behaviors until dbt v2.0. At the same time, we are not committing to supporting them forever (à la Rust editions). We need to strike the right balance between _too fast_ and _never_. 
+ * (b) On a case-by-case basis, if there is a strong compelling reason to remove a legacy behavior and we see minimal in-the-wild usage (<1% of relevant projects), we can remove it entirely. This needs to be communicated well in advance — at least 2 minor versions after introduction in dbt Core. + * (d) These are project configurations, not temporary feature flags. They add complexity to our codebase; that complexity compounds the more we have, and the longer we have them. Such is the price of maintaining mature v1.* software. diff --git a/docs/eli64/parsing-vs-compilation-vs-runtime.md b/docs/guides/parsing-vs-compilation-vs-runtime.md similarity index 100% rename from docs/eli64/parsing-vs-compilation-vs-runtime.md rename to docs/guides/parsing-vs-compilation-vs-runtime.md diff --git a/schemas/dbt/manifest/v12.json b/schemas/dbt/manifest/v12.json index 6c4040ea2f6..6da1edee266 100644 --- a/schemas/dbt/manifest/v12.json +++ b/schemas/dbt/manifest/v12.json @@ -13,7 +13,7 @@ }, "dbt_version": { "type": "string", - "default": "1.8.0a1" + "default": "1.8.0b3" }, "generated_at": { "type": "string" @@ -108,7 +108,7 @@ "anyOf": [ { "type": "object", - "title": "AnalysisNode", + "title": "Seed", "properties": { "database": { "anyOf": [ @@ -127,7 +127,7 @@ "type": "string" }, "resource_type": { - "const": "analysis" + "const": "seed" }, "package_name": { "type": "string" @@ -169,7 +169,7 @@ }, "config": { "type": "object", - "title": "NodeConfig", + "title": "SeedConfig", "properties": { "_extra": { "type": "object", @@ -246,7 +246,7 @@ }, "materialized": { "type": "string", - "default": "view" + "default": "seed" }, "incremental_strategy": { "anyOf": [ @@ -433,16 +433,25 @@ } }, "additionalProperties": false + }, + "delimiter": { + "type": "string", + "default": "," + }, + "quote_columns": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null } }, "additionalProperties": true }, - "_event_status": { - "type": "object", - 
"propertyNames": { - "type": "string" - } - }, "tags": { "type": "array", "items": { @@ -630,10 +639,6 @@ ], "default": null }, - "deferred": { - "type": "boolean", - "default": false - }, "unrendered_config": { "type": "object", "propertyNames": { @@ -664,580 +669,432 @@ "type": "string", "default": "" }, - "language": { - "type": "string", - "default": "sql" - }, - "refs": { - "type": "array", - "items": { - "type": "object", - "title": "RefArgs", - "properties": { - "name": { - "type": "string" - }, - "package": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "number" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "name" - ] - } - }, - "sources": { - "type": "array", - "items": { - "type": "array", - "items": { - "type": "string" - } - } - }, - "metrics": { - "type": "array", - "items": { - "type": "array", - "items": { + "root_path": { + "anyOf": [ + { "type": "string" + }, + { + "type": "null" } - } + ], + "default": null }, "depends_on": { "type": "object", - "title": "DependsOn", + "title": "MacroDependsOn", "properties": { "macros": { "type": "array", "items": { "type": "string" } - }, - "nodes": { - "type": "array", - "items": { - "type": "string" - } } }, "additionalProperties": false }, - "compiled_path": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "compiled": { - "type": "boolean", - "default": false - }, - "compiled_code": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "extra_ctes_injected": { - "type": "boolean", - "default": false - }, - "extra_ctes": { - "type": "array", - "items": { - "type": "object", - "title": "InjectedCTE", - "properties": { - "id": { - "type": "string" - }, - "sql": { - "type": "string" - } - }, - "additionalProperties": false, - "required": [ 
- "id", - "sql" - ] - } - }, - "_pre_injected_sql": { + "defer_relation": { "anyOf": [ { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "contract": { - "type": "object", - "title": "Contract", - "properties": { - "enforced": { - "type": "boolean", - "default": false - }, - "alias_types": { - "type": "boolean", - "default": true - }, - "checksum": { - "anyOf": [ - { + "type": "object", + "title": "DeferRelation", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { "type": "string" }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false - } - }, - "additionalProperties": false, - "required": [ - "database", - "schema", - "name", - "resource_type", - "package_name", - "path", - "original_file_path", - "unique_id", - "fqn", - "alias", - "checksum" - ] - }, - { - "type": "object", - "title": "SingularTestNode", - "properties": { - "database": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - }, - "schema": { - "type": "string" - }, - "name": { - "type": "string" - }, - "resource_type": { - "const": "test" - }, - "package_name": { - "type": "string" - }, - "path": { - "type": "string" - }, - "original_file_path": { - "type": "string" - }, - "unique_id": { - "type": "string" - }, - "fqn": { - "type": "array", - "items": { - "type": "string" - } - }, - "alias": { - "type": "string" - }, - "checksum": { - "type": "object", - "title": "FileHash", - "properties": { - "name": { - "type": "string" - }, - "checksum": { - "type": "string" - } - }, - "additionalProperties": false, - "required": [ - "name", - "checksum" - ] - }, - "config": { - "type": "object", - "title": "TestConfig", - "properties": { - "_extra": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "enabled": { - "type": "boolean", - "default": true - }, - "alias": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } 
- ], - "default": null - }, - "schema": { - "anyOf": [ - { + "alias": { "type": "string" }, - { - "type": "null" - } - ], - "default": "dbt_test__audit" - }, - "database": { - "anyOf": [ - { - "type": "string" + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] }, - { - "type": "null" - } - ], - "default": null - }, - "tags": { - "anyOf": [ - { - "type": "array", - "items": { - "type": "string" - } + "resource_type": { + "enum": [ + "model", + "analysis", + "test", + "snapshot", + "operation", + "seed", + "rpc", + "sql_operation", + "doc", + "source", + "macro", + "exposure", + "metric", + "group", + "saved_query", + "semantic_model", + "unit_test", + "fixture" + ] }, - { - "type": "string" - } - ] - }, - "meta": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "group": { - "anyOf": [ - { + "name": { "type": "string" }, - { - "type": "null" - } - ], - "default": null - }, - "materialized": { - "type": "string", - "default": "test" - }, - "severity": { - "type": "string", - "default": "ERROR", - "pattern": "^([Ww][Aa][Rr][Nn]|[Ee][Rr][Rr][Oo][Rr])$" - }, - "store_failures": { - "anyOf": [ - { - "type": "boolean" - }, - { - "type": "null" - } - ], - "default": null - }, - "store_failures_as": { - "anyOf": [ - { + "description": { "type": "string" }, - { - "type": "null" - } - ], - "default": null - }, - "where": { - "anyOf": [ - { - "type": "string" + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] }, - { - "type": "null" - } - ], - "default": null - }, - "limit": { - "anyOf": [ - { - "type": "integer" + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } }, - { - "type": "null" - } - ], - "default": null - }, - "fail_calc": { - "type": "string", - "default": "count(*)" - }, - "warn_if": { - "type": "string", - "default": "!= 0" - }, - "error_if": { - "type": "string", - "default": "!= 0" - } - }, - "additionalProperties": true - }, - 
"_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "tags": { - "type": "array", - "items": { - "type": "string" - } - }, - "description": { - "type": "string", - "default": "" - }, - "columns": { - "type": "object", - "additionalProperties": { - "type": "object", - "title": "ColumnInfo", - "properties": { - "name": { - "type": "string" - }, - "description": { - "type": "string", - "default": "" - }, - "meta": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "data_type": { - "anyOf": [ - { + "tags": { + "type": "array", + "items": { "type": "string" - }, - { - "type": "null" } - ], - "default": null - }, - "constraints": { - "type": "array", - "items": { - "type": "object", - "title": "ColumnLevelConstraint", - "properties": { - "type": { - "enum": [ - "check", - "not_null", - "unique", - "primary_key", - "foreign_key", - "custom" - ] - }, - "name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "expression": { - "anyOf": [ - { - "type": "string" + }, + "config": { + "anyOf": [ + { + "type": "object", + "title": "NodeConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } }, - { - "type": "null" + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + 
"type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { 
+ "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false } - ], - "default": null - }, - "warn_unenforced": { - "type": "boolean", - "default": true + }, + "additionalProperties": true }, - "warn_unsupported": { - "type": "boolean", - "default": true + { + "type": "null" } - }, - "additionalProperties": false, - "required": [ - "type" ] } }, - "quote": { - "anyOf": [ - { - "type": "boolean" - }, - { - "type": "null" - } - ], - "default": null - }, - "tags": { - "type": "array", - "items": { - "type": "string" - } - }, - "_extra": { - "type": "object", - "propertyNames": { - "type": "string" - } - } - }, - "additionalProperties": true, - "required": [ - "name" - ] - }, - "propertyNames": { - "type": "string" - } - }, - "meta": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "group": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "docs": { - "type": "object", - "title": "Docs", - "properties": { - "show": { - "type": "boolean", - "default": true - }, - "node_color": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false - }, - "patch_path": { - "anyOf": [ - { - "type": "string" + "additionalProperties": false, + "required": [ + "database", + "schema", + "alias", + "relation_name", + "resource_type", + "name", + "description", + "compiled_code", + "meta", + "tags", + "config" + ] }, { "type": "null" } ], "default": null - }, - "build_path": { + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + 
"original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + { + "type": "object", + "title": "Analysis", + "properties": { + "database": { "anyOf": [ { "type": "string" @@ -1245,313 +1102,79 @@ { "type": "null" } - ], - "default": null + ] }, - "deferred": { - "type": "boolean", - "default": false + "schema": { + "type": "string" }, - "unrendered_config": { - "type": "object", - "propertyNames": { - "type": "string" - } + "name": { + "type": "string" }, - "created_at": { - "type": "number" + "resource_type": { + "const": "analysis" }, - "config_call_dict": { - "type": "object", - "propertyNames": { - "type": "string" - } + "package_name": { + "type": "string" }, - "relation_name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null + "path": { + "type": "string" }, - "raw_code": { - "type": "string", - "default": "" + "original_file_path": { + "type": "string" }, - "language": { - "type": "string", - "default": "sql" + "unique_id": { + "type": "string" }, - "refs": { + "fqn": { "type": "array", "items": { - "type": "object", - "title": "RefArgs", - "properties": { - "name": { - "type": "string" - }, - "package": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "number" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "name" - ] + "type": "string" } }, - "sources": { - "type": "array", - "items": { - "type": "array", - "items": { - "type": "string" - } - } + "alias": { + "type": "string" }, - "metrics": { - "type": "array", - "items": { - "type": "array", - "items": { + "checksum": { + "type": "object", + "title": "FileHash", + "properties": { + "name": { + "type": "string" + }, + "checksum": { "type": "string" } - } + }, + "additionalProperties": false, + "required": [ + "name", + "checksum" + ] }, - "depends_on": { + "config": 
{ "type": "object", - "title": "DependsOn", + "title": "NodeConfig", "properties": { - "macros": { - "type": "array", - "items": { + "_extra": { + "type": "object", + "propertyNames": { "type": "string" } }, - "nodes": { - "type": "array", - "items": { - "type": "string" - } - } - }, - "additionalProperties": false - }, - "compiled_path": { - "anyOf": [ - { - "type": "string" + "enabled": { + "type": "boolean", + "default": true }, - { - "type": "null" - } - ], - "default": null - }, - "compiled": { - "type": "boolean", - "default": false - }, - "compiled_code": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "extra_ctes_injected": { - "type": "boolean", - "default": false - }, - "extra_ctes": { - "type": "array", - "items": { - "type": "object", - "title": "InjectedCTE", - "properties": { - "id": { - "type": "string" - }, - "sql": { - "type": "string" - } - }, - "additionalProperties": false, - "required": [ - "id", - "sql" - ] - } - }, - "_pre_injected_sql": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "contract": { - "type": "object", - "title": "Contract", - "properties": { - "enforced": { - "type": "boolean", - "default": false - }, - "alias_types": { - "type": "boolean", - "default": true - }, - "checksum": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false - } - }, - "additionalProperties": false, - "required": [ - "database", - "schema", - "name", - "resource_type", - "package_name", - "path", - "original_file_path", - "unique_id", - "fqn", - "alias", - "checksum" - ] - }, - { - "type": "object", - "title": "HookNode", - "properties": { - "database": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - }, - "schema": { - "type": "string" - }, - "name": { - "type": "string" - }, - "resource_type": { - "const": "operation" - }, - "package_name": { - "type": 
"string" - }, - "path": { - "type": "string" - }, - "original_file_path": { - "type": "string" - }, - "unique_id": { - "type": "string" - }, - "fqn": { - "type": "array", - "items": { - "type": "string" - } - }, - "alias": { - "type": "string" - }, - "checksum": { - "type": "object", - "title": "FileHash", - "properties": { - "name": { - "type": "string" - }, - "checksum": { - "type": "string" - } - }, - "additionalProperties": false, - "required": [ - "name", - "checksum" - ] - }, - "config": { - "type": "object", - "title": "NodeConfig", - "properties": { - "_extra": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "enabled": { - "type": "boolean", - "default": true - }, - "alias": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "schema": { "anyOf": [ @@ -1798,12 +1421,6 @@ }, "additionalProperties": true }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, "tags": { "type": "array", "items": { @@ -1991,10 +1608,6 @@ ], "default": null }, - "deferred": { - "type": "boolean", - "default": false - }, "unrendered_config": { "type": "object", "propertyNames": { @@ -2193,17 +1806,6 @@ } }, "additionalProperties": false - }, - "index": { - "anyOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ], - "default": null } }, "additionalProperties": false, @@ -2223,7 +1825,7 @@ }, { "type": "object", - "title": "ModelNode", + "title": "SingularTest", "properties": { "database": { "anyOf": [ @@ -2242,7 +1844,7 @@ "type": "string" }, "resource_type": { - "const": "model" + "const": "test" }, "package_name": { "type": "string" @@ -2284,7 +1886,7 @@ }, "config": { "type": "object", - "title": "ModelConfig", + "title": "TestConfig", "properties": { "_extra": { "type": "object", @@ -2316,7 +1918,7 @@ "type": "null" } ], - "default": null + "default": 
"dbt_test__audit" }, "database": { "anyOf": [ @@ -2361,12 +1963,17 @@ }, "materialized": { "type": "string", - "default": "view" + "default": "test" }, - "incremental_strategy": { + "severity": { + "type": "string", + "default": "ERROR", + "pattern": "^([Ww][Aa][Rr][Nn]|[Ee][Rr][Rr][Oo][Rr])$" + }, + "store_failures": { "anyOf": [ { - "type": "string" + "type": "boolean" }, { "type": "null" @@ -2374,107 +1981,32 @@ ], "default": null }, - "persist_docs": { - "type": "object", - "propertyNames": { - "type": "string" - } + "store_failures_as": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, - "post-hook": { - "type": "array", - "items": { - "type": "object", - "title": "Hook", - "properties": { - "sql": { - "type": "string" - }, - "transaction": { - "type": "boolean", - "default": true - }, - "index": { - "anyOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ], - "default": null - } + "where": { + "anyOf": [ + { + "type": "string" }, - "additionalProperties": false, - "required": [ - "sql" - ] - } + { + "type": "null" + } + ], + "default": null }, - "pre-hook": { - "type": "array", - "items": { - "type": "object", - "title": "Hook", - "properties": { - "sql": { - "type": "string" - }, - "transaction": { - "type": "boolean", - "default": true - }, - "index": { - "anyOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "sql" - ] - } - }, - "quoting": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "column_types": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "full_refresh": { - "anyOf": [ - { - "type": "boolean" - }, - { - "type": "null" - } - ], - "default": null - }, - "unique_key": { + "limit": { "anyOf": [ { - "type": "string" - }, - { - "type": "array", - "items": { - "type": "string" - } + "type": "integer" }, { "type": "null" @@ -2482,90 +2014,21 @@ ], "default": 
null }, - "on_schema_change": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": "ignore" - }, - "on_configuration_change": { - "enum": [ - "apply", - "continue", - "fail" - ] - }, - "grants": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "packages": { - "type": "array", - "items": { - "type": "string" - } - }, - "docs": { - "type": "object", - "title": "Docs", - "properties": { - "show": { - "type": "boolean", - "default": true - }, - "node_color": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false + "fail_calc": { + "type": "string", + "default": "count(*)" }, - "contract": { - "type": "object", - "title": "ContractConfig", - "properties": { - "enforced": { - "type": "boolean", - "default": false - }, - "alias_types": { - "type": "boolean", - "default": true - } - }, - "additionalProperties": false + "warn_if": { + "type": "string", + "default": "!= 0" }, - "access": { - "enum": [ - "private", - "protected", - "public" - ], - "default": "protected" + "error_if": { + "type": "string", + "default": "!= 0" } }, "additionalProperties": true }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, "tags": { "type": "array", "items": { @@ -2753,10 +2216,6 @@ ], "default": null }, - "deferred": { - "type": "boolean", - "default": false - }, "unrendered_config": { "type": "object", "propertyNames": { @@ -2955,189 +2414,36 @@ } }, "additionalProperties": false - }, - "access": { - "enum": [ - "private", - "protected", - "public" - ], - "default": "protected" - }, - "constraints": { - "type": "array", - "items": { - "type": "object", - "title": "ModelLevelConstraint", - "properties": { - "type": { - "enum": [ - "check", - "not_null", - "unique", - "primary_key", - "foreign_key", - "custom" - ] - }, - "name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null 
- }, - "expression": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "warn_unenforced": { - "type": "boolean", - "default": true - }, - "warn_unsupported": { - "type": "boolean", - "default": true - }, - "columns": { - "type": "array", - "items": { - "type": "string" - } - } - }, - "additionalProperties": false, - "required": [ - "type" - ] - } - }, - "version": { + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + { + "type": "object", + "title": "HookNode", + "properties": { + "database": { "anyOf": [ { "type": "string" }, - { - "type": "number" - }, { "type": "null" } - ], - "default": null - }, - "latest_version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "number" - }, - { - "type": "null" - } - ], - "default": null - }, - "deprecation_date": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "defer_relation": { - "anyOf": [ - { - "type": "object", - "title": "DeferRelation", - "properties": { - "database": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - }, - "schema": { - "type": "string" - }, - "alias": { - "type": "string" - }, - "relation_name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - } - }, - "additionalProperties": false, - "required": [ - "database", - "schema", - "alias", - "relation_name" - ] - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "database", - "schema", - "name", - "resource_type", - "package_name", - "path", - "original_file_path", - "unique_id", - "fqn", - "alias", - "checksum" - ] - }, - { - "type": "object", - "title": "RPCNode", - "properties": { - "database": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] + ] 
}, "schema": { "type": "string" @@ -3146,7 +2452,7 @@ "type": "string" }, "resource_type": { - "const": "rpc" + "const": "operation" }, "package_name": { "type": "string" @@ -3456,12 +2762,6 @@ }, "additionalProperties": true }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, "tags": { "type": "array", "items": { @@ -3649,10 +2949,6 @@ ], "default": null }, - "deferred": { - "type": "boolean", - "default": false - }, "unrendered_config": { "type": "object", "propertyNames": { @@ -3851,6 +3147,17 @@ } }, "additionalProperties": false + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null } }, "additionalProperties": false, @@ -3870,7 +3177,7 @@ }, { "type": "object", - "title": "SqlNode", + "title": "Model", "properties": { "database": { "anyOf": [ @@ -3889,7 +3196,7 @@ "type": "string" }, "resource_type": { - "const": "sql_operation" + "const": "model" }, "package_name": { "type": "string" @@ -3931,7 +3238,7 @@ }, "config": { "type": "object", - "title": "NodeConfig", + "title": "ModelConfig", "properties": { "_extra": { "type": "object", @@ -4195,16 +3502,18 @@ } }, "additionalProperties": false + }, + "access": { + "enum": [ + "private", + "protected", + "public" + ], + "default": "protected" } }, "additionalProperties": true }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, "tags": { "type": "array", "items": { @@ -4392,10 +3701,6 @@ ], "default": null }, - "deferred": { - "type": "boolean", - "default": false - }, "unrendered_config": { "type": "object", "propertyNames": { @@ -4594,684 +3899,504 @@ } }, "additionalProperties": false - } - }, - "additionalProperties": false, - "required": [ - "database", - "schema", - "name", - "resource_type", - "package_name", - "path", - "original_file_path", - "unique_id", - "fqn", - "alias", - "checksum" - ] - }, - { - "type": "object", - "title": "GenericTestNode", - "properties": { - 
"test_metadata": { - "type": "object", - "title": "TestMetadata", - "properties": { - "name": { - "type": "string" - }, - "kwargs": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "namespace": { - "anyOf": [ - { + }, + "access": { + "enum": [ + "private", + "protected", + "public" + ], + "default": "protected" + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ModelLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "columns": { + "type": "array", + "items": { "type": "string" - }, - { - "type": "null" } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "name" - ] + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + } }, - "database": { + "version": { "anyOf": [ { "type": "string" }, + { + "type": "number" + }, { "type": "null" } - ] - }, - "schema": { - "type": "string" + ], + "default": null }, - "name": { - "type": "string" - }, - "resource_type": { - "const": "test" - }, - "package_name": { - "type": "string" - }, - "path": { - "type": "string" - }, - "original_file_path": { - "type": "string" - }, - "unique_id": { - "type": "string" - }, - "fqn": { - "type": "array", - "items": { - "type": "string" - } - }, - "alias": { - "type": "string" - }, - "checksum": { - "type": "object", - "title": "FileHash", - "properties": { - "name": { + "latest_version": { + "anyOf": [ + { "type": "string" }, - "checksum": { + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + }, + 
"deprecation_date": { + "anyOf": [ + { "type": "string" + }, + { + "type": "null" } - }, - "additionalProperties": false, - "required": [ - "name", - "checksum" - ] + ], + "default": null }, - "config": { - "type": "object", - "title": "TestConfig", - "properties": { - "_extra": { + "defer_relation": { + "anyOf": [ + { "type": "object", - "propertyNames": { - "type": "string" - } - }, - "enabled": { - "type": "boolean", - "default": true - }, - "alias": { - "anyOf": [ - { - "type": "string" + "title": "DeferRelation", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] }, - { - "type": "null" - } - ], - "default": null - }, - "schema": { - "anyOf": [ - { + "schema": { "type": "string" }, - { - "type": "null" - } - ], - "default": "dbt_test__audit" - }, - "database": { - "anyOf": [ - { + "alias": { "type": "string" }, - { - "type": "null" - } - ], - "default": null - }, - "tags": { - "anyOf": [ - { - "type": "array", - "items": { - "type": "string" - } - }, - { - "type": "string" - } - ] - }, - "meta": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "group": { - "anyOf": [ - { - "type": "string" + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] }, - { - "type": "null" - } - ], - "default": null - }, - "materialized": { - "type": "string", - "default": "test" - }, - "severity": { - "type": "string", - "default": "ERROR", - "pattern": "^([Ww][Aa][Rr][Nn]|[Ee][Rr][Rr][Oo][Rr])$" - }, - "store_failures": { - "anyOf": [ - { - "type": "boolean" + "resource_type": { + "enum": [ + "model", + "analysis", + "test", + "snapshot", + "operation", + "seed", + "rpc", + "sql_operation", + "doc", + "source", + "macro", + "exposure", + "metric", + "group", + "saved_query", + "semantic_model", + "unit_test", + "fixture" + ] }, - { - "type": "null" - } - ], - "default": null - }, - "store_failures_as": { - "anyOf": [ - { + "name": { "type": "string" }, - { - "type": 
"null" - } - ], - "default": null - }, - "where": { - "anyOf": [ - { + "description": { "type": "string" }, - { - "type": "null" - } - ], - "default": null - }, - "limit": { - "anyOf": [ - { - "type": "integer" + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] }, - { - "type": "null" - } - ], - "default": null - }, - "fail_calc": { - "type": "string", - "default": "count(*)" - }, - "warn_if": { - "type": "string", - "default": "!= 0" - }, - "error_if": { - "type": "string", - "default": "!= 0" - } - }, - "additionalProperties": true - }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "tags": { - "type": "array", - "items": { - "type": "string" - } - }, - "description": { - "type": "string", - "default": "" - }, - "columns": { - "type": "object", - "additionalProperties": { - "type": "object", - "title": "ColumnInfo", - "properties": { - "name": { - "type": "string" - }, - "description": { - "type": "string", - "default": "" - }, - "meta": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "data_type": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "constraints": { - "type": "array", - "items": { + "meta": { "type": "object", - "title": "ColumnLevelConstraint", - "properties": { - "type": { - "enum": [ - "check", - "not_null", - "unique", - "primary_key", - "foreign_key", - "custom" - ] - }, - "name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "expression": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "warn_unenforced": { - "type": "boolean", - "default": true - }, - "warn_unsupported": { - "type": "boolean", - "default": true - } - }, - "additionalProperties": false, - "required": [ - "type" - ] - } - }, - "quote": { - "anyOf": [ - { - "type": "boolean" - }, - { - "type": "null" - } - ], - "default": 
null - }, - "tags": { - "type": "array", - "items": { - "type": "string" - } - }, - "_extra": { - "type": "object", - "propertyNames": { - "type": "string" - } - } - }, - "additionalProperties": true, - "required": [ - "name" - ] - }, - "propertyNames": { - "type": "string" - } - }, - "meta": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "group": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "docs": { - "type": "object", - "title": "Docs", - "properties": { - "show": { - "type": "boolean", - "default": true - }, - "node_color": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false - }, - "patch_path": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "build_path": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "deferred": { - "type": "boolean", - "default": false - }, - "unrendered_config": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "created_at": { - "type": "number" - }, - "config_call_dict": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "relation_name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "raw_code": { - "type": "string", - "default": "" - }, - "language": { - "type": "string", - "default": "sql" - }, - "refs": { - "type": "array", - "items": { - "type": "object", - "title": "RefArgs", - "properties": { - "name": { - "type": "string" - }, - "package": { - "anyOf": [ - { + "propertyNames": { "type": "string" - }, - { - "type": "null" } - ], - "default": null - }, - "version": { - "anyOf": [ - { + }, + "tags": { + "type": "array", + "items": { "type": "string" - }, - { - "type": "number" - }, - { - "type": "null" } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - 
"name" - ] - } - }, - "sources": { - "type": "array", - "items": { - "type": "array", - "items": { - "type": "string" - } - } - }, - "metrics": { - "type": "array", - "items": { - "type": "array", - "items": { - "type": "string" - } - } + }, + "config": { + "anyOf": [ + { + "type": "object", + "title": "NodeConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + 
"anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + } + }, + "additionalProperties": true + }, + { + "type": "null" + } + ] + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "alias", + "relation_name", + "resource_type", + "name", + "description", + "compiled_code", + "meta", + "tags", + "config" + ] + }, + { + "type": "null" + } + ], + "default": null }, - "depends_on": { - "type": "object", - "title": "DependsOn", - "properties": { - "macros": { + "primary_key": { + "anyOf": [ + { 
"type": "array", "items": { "type": "string" } }, - "nodes": { - "type": "array", - "items": { - "type": "string" - } - } - }, - "additionalProperties": false - }, - "compiled_path": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "compiled": { - "type": "boolean", - "default": false - }, - "compiled_code": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "extra_ctes_injected": { - "type": "boolean", - "default": false - }, - "extra_ctes": { - "type": "array", - "items": { - "type": "object", - "title": "InjectedCTE", - "properties": { - "id": { - "type": "string" - }, - "sql": { - "type": "string" - } - }, - "additionalProperties": false, - "required": [ - "id", - "sql" - ] - } - }, - "_pre_injected_sql": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "contract": { - "type": "object", - "title": "Contract", - "properties": { - "enforced": { - "type": "boolean", - "default": false - }, - "alias_types": { - "type": "boolean", - "default": true - }, - "checksum": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false - }, - "column_name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "file_key_name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "attached_node": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" + { + "type": "null" } ], "default": null @@ -5279,7 +4404,6 @@ }, "additionalProperties": false, "required": [ - "test_metadata", "database", "schema", "name", @@ -5295,7 +4419,7 @@ }, { "type": "object", - "title": "SnapshotNode", + "title": "SqlOperation", "properties": { "database": { "anyOf": [ @@ -5314,7 +4438,7 @@ "type": "string" }, "resource_type": { - "const": "snapshot" + "const": "sql_operation" }, 
"package_name": { "type": "string" @@ -5356,7 +4480,7 @@ }, "config": { "type": "object", - "title": "SnapshotConfig", + "title": "NodeConfig", "properties": { "_extra": { "type": "object", @@ -5433,7 +4557,7 @@ }, "materialized": { "type": "string", - "default": "snapshot" + "default": "view" }, "incremental_strategy": { "anyOf": [ @@ -5542,6 +4666,12 @@ { "type": "string" }, + { + "type": "array", + "items": { + "type": "string" + } + }, { "type": "null" } @@ -5614,77 +4744,10 @@ } }, "additionalProperties": false - }, - "strategy": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "target_schema": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "target_database": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "updated_at": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "check_cols": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "array", - "items": { - "type": "string" - } - }, - { - "type": "null" - } - ], - "default": null } }, "additionalProperties": true }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, "tags": { "type": "array", "items": { @@ -5872,10 +4935,6 @@ ], "default": null }, - "deferred": { - "type": "boolean", - "default": false - }, "unrendered_config": { "type": "object", "propertyNames": { @@ -6074,53 +5133,6 @@ } }, "additionalProperties": false - }, - "defer_relation": { - "anyOf": [ - { - "type": "object", - "title": "DeferRelation", - "properties": { - "database": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - }, - "schema": { - "type": "string" - }, - "alias": { - "type": "string" - }, - "relation_name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - } - }, - "additionalProperties": false, - "required": [ - "database", - "schema", - 
"alias", - "relation_name" - ] - }, - { - "type": "null" - } - ], - "default": null } }, "additionalProperties": false, @@ -6135,13 +5147,12 @@ "unique_id", "fqn", "alias", - "checksum", - "config" + "checksum" ] }, { "type": "object", - "title": "UnitTestNode", + "title": "GenericTest", "properties": { "database": { "anyOf": [ @@ -6160,7 +5171,7 @@ "type": "string" }, "resource_type": { - "const": "unit_test" + "const": "test" }, "package_name": { "type": "string" @@ -6202,7 +5213,7 @@ }, "config": { "type": "object", - "title": "UnitTestNodeConfig", + "title": "TestConfig", "properties": { "_extra": { "type": "object", @@ -6234,7 +5245,7 @@ "type": "null" } ], - "default": null + "default": "dbt_test__audit" }, "database": { "anyOf": [ @@ -6279,12 +5290,17 @@ }, "materialized": { "type": "string", - "default": "view" + "default": "test" }, - "incremental_strategy": { - "anyOf": [ - { - "type": "string" + "severity": { + "type": "string", + "default": "ERROR", + "pattern": "^([Ww][Aa][Rr][Nn]|[Ee][Rr][Rr][Oo][Rr])$" + }, + "store_failures": { + "anyOf": [ + { + "type": "boolean" }, { "type": "null" @@ -6292,90 +5308,10 @@ ], "default": null }, - "persist_docs": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "post-hook": { - "type": "array", - "items": { - "type": "object", - "title": "Hook", - "properties": { - "sql": { - "type": "string" - }, - "transaction": { - "type": "boolean", - "default": true - }, - "index": { - "anyOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "sql" - ] - } - }, - "pre-hook": { - "type": "array", - "items": { - "type": "object", - "title": "Hook", - "properties": { - "sql": { - "type": "string" - }, - "transaction": { - "type": "boolean", - "default": true - }, - "index": { - "anyOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false, - "required": 
[ - "sql" - ] - } - }, - "quoting": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "column_types": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "full_refresh": { + "store_failures_as": { "anyOf": [ { - "type": "boolean" + "type": "string" }, { "type": "null" @@ -6383,108 +5319,43 @@ ], "default": null }, - "unique_key": { + "where": { "anyOf": [ { "type": "string" }, - { - "type": "array", - "items": { - "type": "string" - } - }, { "type": "null" } ], "default": null }, - "on_schema_change": { + "limit": { "anyOf": [ { - "type": "string" + "type": "integer" }, { "type": "null" } ], - "default": "ignore" - }, - "on_configuration_change": { - "enum": [ - "apply", - "continue", - "fail" - ] - }, - "grants": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "packages": { - "type": "array", - "items": { - "type": "string" - } + "default": null }, - "docs": { - "type": "object", - "title": "Docs", - "properties": { - "show": { - "type": "boolean", - "default": true - }, - "node_color": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false + "fail_calc": { + "type": "string", + "default": "count(*)" }, - "contract": { - "type": "object", - "title": "ContractConfig", - "properties": { - "enforced": { - "type": "boolean", - "default": false - }, - "alias_types": { - "type": "boolean", - "default": true - } - }, - "additionalProperties": false + "warn_if": { + "type": "string", + "default": "!= 0" }, - "expected_rows": { - "type": "array", - "items": { - "type": "object", - "propertyNames": { - "type": "string" - } - } + "error_if": { + "type": "string", + "default": "!= 0" } }, "additionalProperties": true }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, "tags": { "type": "array", "items": { @@ -6672,10 +5543,6 @@ ], "default": null }, - "deferred": { - "type": "boolean", - 
"default": false - }, "unrendered_config": { "type": "object", "propertyNames": { @@ -6875,7 +5742,7 @@ }, "additionalProperties": false }, - "tested_node_unique_id": { + "column_name": { "anyOf": [ { "type": "string" @@ -6886,7 +5753,7 @@ ], "default": null }, - "this_input_node_unique_id": { + "file_key_name": { "anyOf": [ { "type": "string" @@ -6897,38 +5764,44 @@ ], "default": null }, - "overrides": { + "attached_node": { "anyOf": [ { - "type": "object", - "title": "UnitTestOverrides", - "properties": { - "macros": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "vars": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "env_vars": { - "type": "object", - "propertyNames": { - "type": "string" - } - } - }, - "additionalProperties": false + "type": "string" }, { "type": "null" } ], "default": null + }, + "test_metadata": { + "type": "object", + "title": "TestMetadata", + "properties": { + "name": { + "type": "string", + "default": "test" + }, + "kwargs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "namespace": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false } }, "additionalProperties": false, @@ -6948,7 +5821,7 @@ }, { "type": "object", - "title": "SeedNode", + "title": "Snapshot", "properties": { "database": { "anyOf": [ @@ -6967,7 +5840,7 @@ "type": "string" }, "resource_type": { - "const": "seed" + "const": "snapshot" }, "package_name": { "type": "string" @@ -7009,7 +5882,7 @@ }, "config": { "type": "object", - "title": "SeedConfig", + "title": "SnapshotConfig", "properties": { "_extra": { "type": "object", @@ -7086,7 +5959,7 @@ }, "materialized": { "type": "string", - "default": "seed" + "default": "snapshot" }, "incremental_strategy": { "anyOf": [ @@ -7195,12 +6068,6 @@ { "type": "string" }, - { - "type": "array", - "items": { - "type": "string" - } - }, { "type": "null" } @@ -7274,14 +6141,60 @@ }, 
"additionalProperties": false }, - "delimiter": { - "type": "string", - "default": "," - }, - "quote_columns": { + "strategy": { "anyOf": [ { - "type": "boolean" + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "target_schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "target_database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "updated_at": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "check_cols": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } }, { "type": "null" @@ -7292,12 +6205,6 @@ }, "additionalProperties": true }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, "tags": { "type": "array", "items": { @@ -7485,10 +6392,6 @@ ], "default": null }, - "deferred": { - "type": "boolean", - "default": false - }, "unrendered_config": { "type": "object", "propertyNames": { @@ -7519,7 +6422,89 @@ "type": "string", "default": "" }, - "root_path": { + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": 
"array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "compiled_path": { "anyOf": [ { "type": "string" @@ -7530,15 +6515,78 @@ ], "default": null }, - "depends_on": { - "type": "object", - "title": "MacroDependsOn", - "properties": { - "macros": { - "type": "array", - "items": { + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "type": "object", + "title": "InjectedCTE", + "properties": { + "id": { + "type": "string" + }, + "sql": { "type": "string" } + }, + "additionalProperties": false, + "required": [ + "id", + "sql" + ] + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "type": "object", + "title": "Contract", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + }, + "checksum": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } }, "additionalProperties": false @@ -7574,438 +6622,36 @@ "type": "null" } ] - } - }, - "additionalProperties": false, - "required": [ - "database", - "schema", - "alias", - "relation_name" - ] - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "database", - "schema", - "name", - "resource_type", - "package_name", - "path", - "original_file_path", - "unique_id", - "fqn", - "alias", - "checksum" - ] - } - ] - }, - "propertyNames": { - "type": "string" - } - }, - "sources": { - "type": "object", - "description": "The sources defined in the dbt project and its dependencies", - "additionalProperties": { - 
"type": "object", - "title": "SourceDefinition", - "properties": { - "database": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - }, - "schema": { - "type": "string" - }, - "name": { - "type": "string" - }, - "resource_type": { - "const": "source" - }, - "package_name": { - "type": "string" - }, - "path": { - "type": "string" - }, - "original_file_path": { - "type": "string" - }, - "unique_id": { - "type": "string" - }, - "fqn": { - "type": "array", - "items": { - "type": "string" - } - }, - "source_name": { - "type": "string" - }, - "source_description": { - "type": "string" - }, - "loader": { - "type": "string" - }, - "identifier": { - "type": "string" - }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "quoting": { - "type": "object", - "title": "Quoting", - "properties": { - "database": { - "anyOf": [ - { - "type": "boolean" - }, - { - "type": "null" - } - ], - "default": null - }, - "schema": { - "anyOf": [ - { - "type": "boolean" - }, - { - "type": "null" - } - ], - "default": null - }, - "identifier": { - "anyOf": [ - { - "type": "boolean" - }, - { - "type": "null" - } - ], - "default": null - }, - "column": { - "anyOf": [ - { - "type": "boolean" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false - }, - "loaded_at_field": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "freshness": { - "anyOf": [ - { - "type": "object", - "title": "FreshnessThreshold", - "properties": { - "warn_after": { - "anyOf": [ - { - "type": "object", - "title": "Time", - "properties": { - "count": { - "anyOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ], - "default": null - }, - "period": { - "anyOf": [ - { - "enum": [ - "minute", - "hour", - "day" - ] - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false - }, - { - "type": "null" - } - ] - }, - "error_after": { - 
"anyOf": [ - { - "type": "object", - "title": "Time", - "properties": { - "count": { - "anyOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ], - "default": null - }, - "period": { - "anyOf": [ - { - "enum": [ - "minute", - "hour", - "day" - ] - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false }, - { - "type": "null" - } - ] - }, - "filter": { - "anyOf": [ - { - "type": "string" + "resource_type": { + "enum": [ + "model", + "analysis", + "test", + "snapshot", + "operation", + "seed", + "rpc", + "sql_operation", + "doc", + "source", + "macro", + "exposure", + "metric", + "group", + "saved_query", + "semantic_model", + "unit_test", + "fixture" + ] }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false - }, - { - "type": "null" - } - ], - "default": null - }, - "external": { - "anyOf": [ - { - "type": "object", - "title": "ExternalTable", - "properties": { - "_extra": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "location": { - "anyOf": [ - { + "name": { "type": "string" }, - { - "type": "null" - } - ], - "default": null - }, - "file_format": { - "anyOf": [ - { + "description": { "type": "string" }, - { - "type": "null" - } - ], - "default": null - }, - "row_format": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "tbl_properties": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "partitions": { - "anyOf": [ - { - "type": "array", - "items": { - "type": "string" - } - }, - { - "type": "array", - "items": { - "type": "object", - "title": "ExternalPartition", - "properties": { - "_extra": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "name": { - "type": "string", - "default": "" - }, - "description": { - "type": "string", - "default": "" - }, - "data_type": { - "type": "string", - "default": "" - }, - "meta": { - "type": 
"object", - "propertyNames": { - "type": "string" - } - } - }, - "additionalProperties": true - } - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": true - }, - { - "type": "null" - } - ], - "default": null - }, - "description": { - "type": "string", - "default": "" - }, - "columns": { - "type": "object", - "additionalProperties": { - "type": "object", - "title": "ColumnInfo", - "properties": { - "name": { - "type": "string" - }, - "description": { - "type": "string", - "default": "" - }, - "meta": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "data_type": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "constraints": { - "type": "array", - "items": { - "type": "object", - "title": "ColumnLevelConstraint", - "properties": { - "type": { - "enum": [ - "check", - "not_null", - "unique", - "primary_key", - "foreign_key", - "custom" - ] - }, - "name": { + "compiled_code": { "anyOf": [ { "type": "string" @@ -8013,1087 +6659,311 @@ { "type": "null" } - ], - "default": null + ] }, - "expression": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } }, - "warn_unenforced": { - "type": "boolean", - "default": true + "tags": { + "type": "array", + "items": { + "type": "string" + } }, - "warn_unsupported": { - "type": "boolean", - "default": true - } - }, - "additionalProperties": false, - "required": [ - "type" - ] - } - }, - "quote": { - "anyOf": [ - { - "type": "boolean" - }, - { - "type": "null" - } - ], - "default": null - }, - "tags": { - "type": "array", - "items": { - "type": "string" - } - }, - "_extra": { - "type": "object", - "propertyNames": { - "type": "string" - } - } - }, - "additionalProperties": true, - "required": [ - "name" - ] - }, - "propertyNames": { - "type": "string" - } - }, - "meta": { - "type": "object", - "propertyNames": { - 
"type": "string" - } - }, - "source_meta": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "tags": { - "type": "array", - "items": { - "type": "string" - } - }, - "config": { - "type": "object", - "title": "SourceConfig", - "properties": { - "_extra": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "enabled": { - "type": "boolean", - "default": true - } - }, - "additionalProperties": true - }, - "patch_path": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "unrendered_config": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "relation_name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "created_at": { - "type": "number" - } - }, - "additionalProperties": false, - "required": [ - "database", - "schema", - "name", - "resource_type", - "package_name", - "path", - "original_file_path", - "unique_id", - "fqn", - "source_name", - "source_description", - "loader", - "identifier" - ] - }, - "propertyNames": { - "type": "string" - } - }, - "macros": { - "type": "object", - "description": "The macros defined in the dbt project and its dependencies", - "additionalProperties": { - "type": "object", - "title": "Macro", - "properties": { - "name": { - "type": "string" - }, - "resource_type": { - "const": "macro" - }, - "package_name": { - "type": "string" - }, - "path": { - "type": "string" - }, - "original_file_path": { - "type": "string" - }, - "unique_id": { - "type": "string" - }, - "macro_sql": { - "type": "string" - }, - "depends_on": { - "type": "object", - "title": "MacroDependsOn", - "properties": { - "macros": { - "type": "array", - "items": { - "type": "string" - } - } - }, - "additionalProperties": false - }, - "description": { - "type": "string", - "default": "" - }, - "meta": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "docs": { - "type": "object", - "title": 
"Docs", - "properties": { - "show": { - "type": "boolean", - "default": true - }, - "node_color": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false - }, - "patch_path": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "arguments": { - "type": "array", - "items": { - "type": "object", - "title": "MacroArgument", - "properties": { - "name": { - "type": "string" - }, - "type": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "description": { - "type": "string", - "default": "" - } - }, - "additionalProperties": false, - "required": [ - "name" - ] - } - }, - "created_at": { - "type": "number" - }, - "supported_languages": { - "anyOf": [ - { - "type": "array", - "items": { - "enum": [ - "python", - "sql" - ] - } - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "name", - "resource_type", - "package_name", - "path", - "original_file_path", - "unique_id", - "macro_sql" - ] - }, - "propertyNames": { - "type": "string" - } - }, - "docs": { - "type": "object", - "description": "The docs defined in the dbt project and its dependencies", - "additionalProperties": { - "type": "object", - "title": "Documentation", - "properties": { - "name": { - "type": "string" - }, - "resource_type": { - "const": "doc" - }, - "package_name": { - "type": "string" - }, - "path": { - "type": "string" - }, - "original_file_path": { - "type": "string" - }, - "unique_id": { - "type": "string" - }, - "block_contents": { - "type": "string" - } - }, - "additionalProperties": false, - "required": [ - "name", - "resource_type", - "package_name", - "path", - "original_file_path", - "unique_id", - "block_contents" - ] - }, - "propertyNames": { - "type": "string" - } - }, - "exposures": { - "type": "object", - "description": "The exposures defined in the dbt project 
and its dependencies", - "additionalProperties": { - "type": "object", - "title": "Exposure", - "properties": { - "name": { - "type": "string" - }, - "resource_type": { - "const": "exposure" - }, - "package_name": { - "type": "string" - }, - "path": { - "type": "string" - }, - "original_file_path": { - "type": "string" - }, - "unique_id": { - "type": "string" - }, - "fqn": { - "type": "array", - "items": { - "type": "string" - } - }, - "type": { - "enum": [ - "dashboard", - "notebook", - "analysis", - "ml", - "application" - ] - }, - "owner": { - "type": "object", - "title": "Owner", - "properties": { - "_extra": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "email": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": true - }, - "description": { - "type": "string", - "default": "" - }, - "label": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "maturity": { - "anyOf": [ - { - "enum": [ - "low", - "medium", - "high" - ] - }, - { - "type": "null" - } - ], - "default": null - }, - "meta": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "tags": { - "type": "array", - "items": { - "type": "string" - } - }, - "config": { - "type": "object", - "title": "ExposureConfig", - "properties": { - "_extra": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "enabled": { - "type": "boolean", - "default": true - } - }, - "additionalProperties": true - }, - "unrendered_config": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "url": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "depends_on": { - "type": "object", - "title": "DependsOn", - "properties": { - "macros": { - "type": "array", - "items": { - "type": 
"string" - } - }, - "nodes": { - "type": "array", - "items": { - "type": "string" - } - } - }, - "additionalProperties": false - }, - "refs": { - "type": "array", - "items": { - "type": "object", - "title": "RefArgs", - "properties": { - "name": { - "type": "string" - }, - "package": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "number" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "name" - ] - } - }, - "sources": { - "type": "array", - "items": { - "type": "array", - "items": { - "type": "string" - } - } - }, - "metrics": { - "type": "array", - "items": { - "type": "array", - "items": { - "type": "string" - } - } - }, - "created_at": { - "type": "number" - } - }, - "additionalProperties": false, - "required": [ - "name", - "resource_type", - "package_name", - "path", - "original_file_path", - "unique_id", - "fqn", - "type", - "owner" - ] - }, - "propertyNames": { - "type": "string" - } - }, - "metrics": { - "type": "object", - "description": "The metrics defined in the dbt project and its dependencies", - "additionalProperties": { - "type": "object", - "title": "Metric", - "properties": { - "name": { - "type": "string" - }, - "resource_type": { - "const": "metric" - }, - "package_name": { - "type": "string" - }, - "path": { - "type": "string" - }, - "original_file_path": { - "type": "string" - }, - "unique_id": { - "type": "string" - }, - "fqn": { - "type": "array", - "items": { - "type": "string" - } - }, - "description": { - "type": "string" - }, - "label": { - "type": "string" - }, - "type": { - "enum": [ - "simple", - "ratio", - "cumulative", - "derived", - "conversion" - ] - }, - "type_params": { - "type": "object", - "title": "MetricTypeParams", - "properties": { - "measure": { - "anyOf": [ - { - "type": "object", - "title": "MetricInputMeasure", - "properties": { - "name": 
{ - "type": "string" - }, - "filter": { + "config": { "anyOf": [ { "type": "object", - "title": "WhereFilterIntersection", + "title": "NodeConfig", "properties": { - "where_filters": { - "type": "array", - "items": { - "type": "object", - "title": "WhereFilter", - "properties": { - "where_sql_template": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { "type": "string" } }, - "additionalProperties": false, - "required": [ - "where_sql_template" - ] - } - } - }, - "additionalProperties": false, - "required": [ - "where_filters" - ] - }, - { - "type": "null" - } - ], - "default": null - }, - "alias": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "join_to_timespine": { - "type": "boolean", - "default": false - }, - "fill_nulls_with": { - "anyOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "name" - ] - }, - { - "type": "null" - } - ], - "default": null - }, - "input_measures": { - "type": "array", - "items": { - "type": "object", - "title": "MetricInputMeasure", - "properties": { - "name": { - "type": "string" - }, - "filter": { - "anyOf": [ - { - "type": "object", - "title": "WhereFilterIntersection", - "properties": { - "where_filters": { - "type": "array", - "items": { - "type": "object", - "title": "WhereFilter", - "properties": { - "where_sql_template": { + { "type": "string" } - }, - "additionalProperties": false, - "required": [ - 
"where_sql_template" ] - } - } - }, - "additionalProperties": false, - "required": [ - "where_filters" - ] - }, - { - "type": "null" - } - ], - "default": null - }, - "alias": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "join_to_timespine": { - "type": "boolean", - "default": false - }, - "fill_nulls_with": { - "anyOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "name" - ] - } - }, - "numerator": { - "anyOf": [ - { - "type": "object", - "title": "MetricInput", - "properties": { - "name": { - "type": "string" - }, - "filter": { - "anyOf": [ - { - "type": "object", - "title": "WhereFilterIntersection", - "properties": { - "where_filters": { + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { "type": "array", "items": { "type": "object", - "title": "WhereFilter", + "title": "Hook", "properties": { - "where_sql_template": { + "sql": { "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null } }, "additionalProperties": false, "required": [ - "where_sql_template" + "sql" ] } - } - }, - "additionalProperties": false, - "required": [ - "where_filters" - ] - }, - { - "type": "null" - } - ], - "default": null - }, - "alias": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "offset_window": { - "anyOf": [ - { - "type": 
"object", - "title": "MetricTimeWindow", - "properties": { - "count": { - "type": "integer" }, - "granularity": { - "enum": [ - "day", - "week", - "month", - "quarter", - "year" - ] - } - }, - "additionalProperties": false, - "required": [ - "count", - "granularity" - ] - }, - { - "type": "null" - } - ], - "default": null - }, - "offset_to_grain": { - "anyOf": [ - { - "enum": [ - "day", - "week", - "month", - "quarter", - "year" - ] - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "name" - ] - }, - { - "type": "null" - } - ], - "default": null - }, - "denominator": { - "anyOf": [ - { - "type": "object", - "title": "MetricInput", - "properties": { - "name": { - "type": "string" - }, - "filter": { - "anyOf": [ - { - "type": "object", - "title": "WhereFilterIntersection", - "properties": { - "where_filters": { + "pre-hook": { "type": "array", "items": { "type": "object", - "title": "WhereFilter", + "title": "Hook", "properties": { - "where_sql_template": { + "sql": { "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null } }, "additionalProperties": false, "required": [ - "where_sql_template" + "sql" ] } - } - }, - "additionalProperties": false, - "required": [ - "where_filters" - ] - }, - { - "type": "null" - } - ], - "default": null - }, - "alias": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "offset_window": { - "anyOf": [ - { - "type": "object", - "title": "MetricTimeWindow", - "properties": { - "count": { - "type": "integer" }, - "granularity": { + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + 
"default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { "enum": [ - "day", - "week", - "month", - "quarter", - "year" + "apply", + "continue", + "fail" ] - } - }, - "additionalProperties": false, - "required": [ - "count", - "granularity" - ] - }, - { - "type": "null" - } - ], - "default": null - }, - "offset_to_grain": { - "anyOf": [ - { - "enum": [ - "day", - "week", - "month", - "quarter", - "year" - ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + } + }, + "additionalProperties": true }, { "type": "null" } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "name" - ] - }, - { - "type": "null" - } - ], - "default": null - }, - "expr": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "window": { - "anyOf": [ - { - "type": "object", - "title": "MetricTimeWindow", - "properties": { - "count": { - "type": "integer" - }, - "granularity": { - "enum": [ - "day", - "week", - "month", - "quarter", - "year" ] } }, "additionalProperties": false, "required": [ - "count", - 
"granularity" - ] - }, - { - "type": "null" - } - ], - "default": null - }, - "grain_to_date": { - "anyOf": [ - { - "enum": [ - "day", - "week", - "month", - "quarter", - "year" + "database", + "schema", + "alias", + "relation_name", + "resource_type", + "name", + "description", + "compiled_code", + "meta", + "tags", + "config" ] }, { @@ -9101,117 +6971,94 @@ } ], "default": null + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum", + "config" + ] + } + ] + }, + "propertyNames": { + "type": "string" + } + }, + "sources": { + "type": "object", + "description": "The sources defined in the dbt project and its dependencies", + "additionalProperties": { + "type": "object", + "title": "SourceDefinition", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" }, - "metrics": { - "anyOf": [ - { - "type": "array", - "items": { - "type": "object", - "title": "MetricInput", - "properties": { - "name": { - "type": "string" - }, - "filter": { - "anyOf": [ - { - "type": "object", - "title": "WhereFilterIntersection", - "properties": { - "where_filters": { - "type": "array", - "items": { - "type": "object", - "title": "WhereFilter", - "properties": { - "where_sql_template": { - "type": "string" - } - }, - "additionalProperties": false, - "required": [ - "where_sql_template" - ] - } - } - }, - "additionalProperties": false, - "required": [ - "where_filters" - ] - }, - { - "type": "null" - } - ], - "default": null - }, - "alias": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "offset_window": { - "anyOf": [ - { - "type": "object", - "title": "MetricTimeWindow", - "properties": { - "count": { - "type": "integer" - }, - "granularity": { - "enum": [ - "day", - "week", - "month", - "quarter", - "year" - ] - } - }, - "additionalProperties": false, - "required": [ - 
"count", - "granularity" - ] - }, - { - "type": "null" - } - ], - "default": null - }, - "offset_to_grain": { - "anyOf": [ - { - "enum": [ - "day", - "week", - "month", - "quarter", - "year" - ] - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "name" - ] - } + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "source" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "source_name": { + "type": "string" + }, + "source_description": { + "type": "string" + }, + "loader": { + "type": "string" + }, + "identifier": { + "type": "string" + }, + "quoting": { + "type": "object", + "title": "Quoting", + "properties": { + "database": { + "anyOf": [ + { + "type": "boolean" }, { "type": "null" @@ -9219,46 +7066,69 @@ ], "default": null }, - "conversion_type_params": { + "schema": { "anyOf": [ { - "type": "object", - "title": "ConversionTypeParams", - "properties": { - "base_measure": { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "identifier": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "column": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "loaded_at_field": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "freshness": { + "anyOf": [ + { + "type": "object", + "title": "FreshnessThreshold", + "properties": { + "warn_after": { + "anyOf": [ + { "type": "object", - "title": "MetricInputMeasure", + "title": "Time", "properties": { - "name": { - "type": "string" - }, - "filter": { + "count": { "anyOf": [ { - 
"type": "object", - "title": "WhereFilterIntersection", - "properties": { - "where_filters": { - "type": "array", - "items": { - "type": "object", - "title": "WhereFilter", - "properties": { - "where_sql_template": { - "type": "string" - } - }, - "additionalProperties": false, - "required": [ - "where_sql_template" - ] - } - } - }, - "additionalProperties": false, - "required": [ - "where_filters" - ] + "type": "integer" }, { "type": "null" @@ -9266,22 +7136,36 @@ ], "default": null }, - "alias": { + "period": { "anyOf": [ { - "type": "string" + "enum": [ + "minute", + "hour", + "day" + ] }, { "type": "null" } ], "default": null - }, - "join_to_timespine": { - "type": "boolean", - "default": false - }, - "fill_nulls_with": { + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ] + }, + "error_after": { + "anyOf": [ + { + "type": "object", + "title": "Time", + "properties": { + "count": { "anyOf": [ { "type": "integer" @@ -9291,120 +7175,217 @@ } ], "default": null - } - }, - "additionalProperties": false, - "required": [ - "name" - ] - }, - "conversion_measure": { - "type": "object", - "title": "MetricInputMeasure", - "properties": { - "name": { - "type": "string" }, - "filter": { + "period": { "anyOf": [ { - "type": "object", - "title": "WhereFilterIntersection", - "properties": { - "where_filters": { - "type": "array", - "items": { - "type": "object", - "title": "WhereFilter", - "properties": { - "where_sql_template": { - "type": "string" - } - }, - "additionalProperties": false, - "required": [ - "where_sql_template" - ] - } - } - }, - "additionalProperties": false, - "required": [ - "where_filters" + "enum": [ + "minute", + "hour", + "day" ] }, { "type": "null" } - ], - "default": null - }, - "alias": { - "anyOf": [ - { + ], + "default": null + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ] + }, + "filter": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + 
"additionalProperties": false + }, + { + "type": "null" + } + ], + "default": null + }, + "external": { + "anyOf": [ + { + "type": "object", + "title": "ExternalTable", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "location": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "file_format": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "row_format": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tbl_properties": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "partitions": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "array", + "items": { + "type": "object", + "title": "ExternalPartition", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "name": { + "type": "string", + "default": "" + }, + "description": { + "type": "string", + "default": "" + }, + "data_type": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { "type": "string" - }, - { - "type": "null" } - ], - "default": null - }, - "join_to_timespine": { - "type": "boolean", - "default": false + } }, - "fill_nulls_with": { - "anyOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "name" - ] - }, - "entity": { - "type": "string" + "additionalProperties": true + } }, - "calculation": { + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true + }, + { + "type": "null" + } + ], + "default": null + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "ColumnInfo", + "properties": { + "name": { + 
"type": "string" + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "data_type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ColumnLevelConstraint", + "properties": { + "type": { "enum": [ - "conversions", - "conversion_rate" - ], - "default": "conversion_rate" + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] }, - "window": { + "name": { "anyOf": [ { - "type": "object", - "title": "MetricTimeWindow", - "properties": { - "count": { - "type": "integer" - }, - "granularity": { - "enum": [ - "day", - "week", - "month", - "quarter", - "year" - ] - } - }, - "additionalProperties": false, - "required": [ - "count", - "granularity" - ] + "type": "string" }, { "type": "null" @@ -9412,41 +7393,413 @@ ], "default": null }, - "constant_properties": { + "expression": { "anyOf": [ { - "type": "array", - "items": { - "type": "object", - "title": "ConstantPropertyInput", - "properties": { - "base_property": { - "type": "string" - }, - "conversion_property": { - "type": "string" - } - }, - "additionalProperties": false, - "required": [ - "base_property", - "conversion_property" - ] - } + "type": "string" }, { "type": "null" } ], "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true } }, - "additionalProperties": false, - "required": [ - "base_measure", - "conversion_measure", - "entity" - ] + "additionalProperties": false, + "required": [ + "type" + ] + } + }, + "quote": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, 
+ "additionalProperties": true, + "required": [ + "name" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "source_meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "config": { + "type": "object", + "title": "SourceConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": true + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "created_at": { + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "source_name", + "source_description", + "loader", + "identifier" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "macros": { + "type": "object", + "description": "The macros defined in the dbt project and its dependencies", + "additionalProperties": { + "type": "object", + "title": "Macro", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "const": "macro" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "macro_sql": { + "type": "string" + }, + "depends_on": { + "type": "object", + "title": "MacroDependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "description": 
{ + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "arguments": { + "type": "array", + "items": { + "type": "object", + "title": "MacroArgument", + "properties": { + "name": { + "type": "string" + }, + "type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "description": { + "type": "string", + "default": "" + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "created_at": { + "type": "number" + }, + "supported_languages": { + "anyOf": [ + { + "type": "array", + "items": { + "enum": [ + "python", + "sql" + ] + } + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "macro_sql" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "docs": { + "type": "object", + "description": "The docs defined in the dbt project and its dependencies", + "additionalProperties": { + "type": "object", + "title": "Documentation", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "const": "doc" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "block_contents": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + 
"block_contents" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "exposures": { + "type": "object", + "description": "The exposures defined in the dbt project and its dependencies", + "additionalProperties": { + "type": "object", + "title": "Exposure", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "const": "exposure" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "type": { + "enum": [ + "dashboard", + "notebook", + "analysis", + "ml", + "application" + ] + }, + "owner": { + "type": "object", + "title": "Owner", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "email": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "name": { + "anyOf": [ + { + "type": "string" }, { "type": "null" @@ -9455,35 +7808,16 @@ "default": null } }, - "additionalProperties": false + "additionalProperties": true }, - "filter": { + "description": { + "type": "string", + "default": "" + }, + "label": { "anyOf": [ { - "type": "object", - "title": "WhereFilterIntersection", - "properties": { - "where_filters": { - "type": "array", - "items": { - "type": "object", - "title": "WhereFilter", - "properties": { - "where_sql_template": { - "type": "string" - } - }, - "additionalProperties": false, - "required": [ - "where_sql_template" - ] - } - } - }, - "additionalProperties": false, - "required": [ - "where_filters" - ] + "type": "string" }, { "type": "null" @@ -9491,45 +7825,13 @@ ], "default": null }, - "metadata": { + "maturity": { "anyOf": [ { - "type": "object", - "title": "SourceFileMetadata", - "properties": { - "repo_file_path": { - "type": "string" - }, - "file_slice": { - "type": "object", - "title": "FileSlice", - "properties": { - "filename": { 
- "type": "string" - }, - "content": { - "type": "string" - }, - "start_line_number": { - "type": "integer" - }, - "end_line_number": { - "type": "integer" - } - }, - "additionalProperties": false, - "required": [ - "filename", - "content", - "start_line_number", - "end_line_number" - ] - } - }, - "additionalProperties": false, - "required": [ - "repo_file_path", - "file_slice" + "enum": [ + "low", + "medium", + "high" ] }, { @@ -9552,7 +7854,7 @@ }, "config": { "type": "object", - "title": "MetricConfig", + "title": "ExposureConfig", "properties": { "_extra": { "type": "object", @@ -9563,17 +7865,6 @@ "enabled": { "type": "boolean", "default": true - }, - "group": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null } }, "additionalProperties": true @@ -9584,14 +7875,16 @@ "type": "string" } }, - "sources": { - "type": "array", - "items": { - "type": "array", - "items": { + "url": { + "anyOf": [ + { "type": "string" + }, + { + "type": "null" } - } + ], + "default": null }, "depends_on": { "type": "object", @@ -9653,6 +7946,15 @@ ] } }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, "metrics": { "type": "array", "items": { @@ -9664,17 +7966,6 @@ }, "created_at": { "type": "number" - }, - "group": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null } }, "additionalProperties": false, @@ -9686,28 +7977,26 @@ "original_file_path", "unique_id", "fqn", - "description", - "label", "type", - "type_params" + "owner" ] }, "propertyNames": { "type": "string" } }, - "groups": { + "metrics": { "type": "object", - "description": "The groups defined in the dbt project", + "description": "The metrics defined in the dbt project and its dependencies", "additionalProperties": { "type": "object", - "title": "Group", + "title": "Metric", "properties": { "name": { "type": "string" }, "resource_type": { - "const": "group" + "const": "metric" }, 
"package_name": { "type": "string" @@ -9721,20 +8010,105 @@ "unique_id": { "type": "string" }, - "owner": { + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string" + }, + "label": { + "type": "string" + }, + "type": { + "enum": [ + "simple", + "ratio", + "cumulative", + "derived", + "conversion" + ] + }, + "type_params": { "type": "object", - "title": "Owner", - "properties": { - "_extra": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "email": { + "title": "MetricTypeParams", + "properties": { + "measure": { "anyOf": [ { - "type": "string" + "type": "object", + "title": "MetricInputMeasure", + "properties": { + "name": { + "type": "string" + }, + "filter": { + "anyOf": [ + { + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "join_to_timespine": { + "type": "boolean", + "default": false + }, + "fill_nulls_with": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] }, { "type": "null" @@ -9742,289 +8116,407 @@ ], "default": null }, - "name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": true - } - }, - "additionalProperties": false, - "required": [ - "name", - "resource_type", - "package_name", - "path", - "original_file_path", - "unique_id", - "owner" - ] - }, - "propertyNames": { - 
"type": "string" - } - }, - "selectors": { - "type": "object", - "description": "The selectors defined in selectors.yml", - "propertyNames": { - "type": "string" - } - }, - "disabled": { - "description": "A mapping of the disabled nodes in the target", - "anyOf": [ - { - "type": "object", - "additionalProperties": { - "type": "array", - "items": { - "anyOf": [ - { + "input_measures": { + "type": "array", + "items": { "type": "object", - "title": "AnalysisNode", + "title": "MetricInputMeasure", "properties": { - "database": { + "name": { + "type": "string" + }, + "filter": { "anyOf": [ { - "type": "string" + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] }, { "type": "null" } - ] - }, - "schema": { - "type": "string" - }, - "name": { - "type": "string" - }, - "resource_type": { - "const": "analysis" - }, - "package_name": { - "type": "string" - }, - "path": { - "type": "string" - }, - "original_file_path": { - "type": "string" - }, - "unique_id": { - "type": "string" - }, - "fqn": { - "type": "array", - "items": { - "type": "string" - } + ], + "default": null }, "alias": { - "type": "string" - }, - "checksum": { - "type": "object", - "title": "FileHash", - "properties": { - "name": { + "anyOf": [ + { "type": "string" }, - "checksum": { - "type": "string" + { + "type": "null" } - }, - "additionalProperties": false, - "required": [ - "name", - "checksum" - ] + ], + "default": null }, - "config": { - "type": "object", - "title": "NodeConfig", - "properties": { - "_extra": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "enabled": { - "type": "boolean", - "default": true - }, - "alias": { - 
"anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "schema": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "database": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null + "join_to_timespine": { + "type": "boolean", + "default": false + }, + "fill_nulls_with": { + "anyOf": [ + { + "type": "integer" }, - "tags": { - "anyOf": [ - { - "type": "array", - "items": { - "type": "string" + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "numerator": { + "anyOf": [ + { + "type": "object", + "title": "MetricInput", + "properties": { + "name": { + "type": "string" + }, + "filter": { + "anyOf": [ + { + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } } }, - { - "type": "string" - } - ] - }, - "meta": { - "type": "object", - "propertyNames": { - "type": "string" + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" } - }, - "group": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "materialized": { - "type": "string", - "default": "view" - }, - "incremental_strategy": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "persist_docs": { - "type": "object", - "propertyNames": { + ], + "default": null + }, + "alias": { + "anyOf": [ + { "type": "string" + }, + { + "type": "null" } - }, - "post-hook": { - "type": "array", - "items": { + ], + "default": null + }, + "offset_window": { + "anyOf": [ + { "type": "object", - "title": "Hook", + "title": 
"MetricTimeWindow", "properties": { - "sql": { - "type": "string" - }, - "transaction": { - "type": "boolean", - "default": true + "count": { + "type": "integer" }, - "index": { - "anyOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ], - "default": null + "granularity": { + "enum": [ + "day", + "week", + "month", + "quarter", + "year" + ] } }, "additionalProperties": false, "required": [ - "sql" + "count", + "granularity" ] + }, + { + "type": "null" } - }, - "pre-hook": { - "type": "array", - "items": { + ], + "default": null + }, + "offset_to_grain": { + "anyOf": [ + { + "enum": [ + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "denominator": { + "anyOf": [ + { + "type": "object", + "title": "MetricInput", + "properties": { + "name": { + "type": "string" + }, + "filter": { + "anyOf": [ + { "type": "object", - "title": "Hook", + "title": "WhereFilterIntersection", "properties": { - "sql": { - "type": "string" - }, - "transaction": { - "type": "boolean", - "default": true - }, - "index": { - "anyOf": [ - { - "type": "integer" + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } }, - { - "type": "null" - } - ], - "default": null + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "offset_window": { + "anyOf": [ + { + "type": "object", + "title": "MetricTimeWindow", + "properties": { + "count": { + "type": "integer" + }, + "granularity": { + "enum": [ + "day", + "week", + "month", + 
"quarter", + "year" + ] } }, "additionalProperties": false, "required": [ - "sql" + "count", + "granularity" ] + }, + { + "type": "null" } - }, - "quoting": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "column_types": { - "type": "object", - "propertyNames": { - "type": "string" + ], + "default": null + }, + "offset_to_grain": { + "anyOf": [ + { + "enum": [ + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "expr": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "window": { + "anyOf": [ + { + "type": "object", + "title": "MetricTimeWindow", + "properties": { + "count": { + "type": "integer" + }, + "granularity": { + "enum": [ + "day", + "week", + "month", + "quarter", + "year" + ] + } + }, + "additionalProperties": false, + "required": [ + "count", + "granularity" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "grain_to_date": { + "anyOf": [ + { + "enum": [ + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "metrics": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "object", + "title": "MetricInput", + "properties": { + "name": { + "type": "string" }, - "full_refresh": { + "filter": { "anyOf": [ { - "type": "boolean" + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] }, { "type": "null" @@ -10032,132 +8524,118 @@ ], "default": null }, - "unique_key": { + "alias": 
{ "anyOf": [ { "type": "string" }, - { - "type": "array", - "items": { - "type": "string" - } - }, { "type": "null" } ], "default": null }, - "on_schema_change": { + "offset_window": { "anyOf": [ { - "type": "string" + "type": "object", + "title": "MetricTimeWindow", + "properties": { + "count": { + "type": "integer" + }, + "granularity": { + "enum": [ + "day", + "week", + "month", + "quarter", + "year" + ] + } + }, + "additionalProperties": false, + "required": [ + "count", + "granularity" + ] }, { "type": "null" } ], - "default": "ignore" - }, - "on_configuration_change": { - "enum": [ - "apply", - "continue", - "fail" - ] - }, - "grants": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "packages": { - "type": "array", - "items": { - "type": "string" - } - }, - "docs": { - "type": "object", - "title": "Docs", - "properties": { - "show": { - "type": "boolean", - "default": true - }, - "node_color": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false + "default": null }, - "contract": { - "type": "object", - "title": "ContractConfig", - "properties": { - "enforced": { - "type": "boolean", - "default": false + "offset_to_grain": { + "anyOf": [ + { + "enum": [ + "day", + "week", + "month", + "quarter", + "year" + ] }, - "alias_types": { - "type": "boolean", - "default": true + { + "type": "null" } - }, - "additionalProperties": false + ], + "default": null } }, - "additionalProperties": true - }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "tags": { - "type": "array", - "items": { - "type": "string" - } - }, - "description": { - "type": "string", - "default": "" - }, - "columns": { - "type": "object", - "additionalProperties": { + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + { + "type": "null" + } + ], + "default": null + }, + "conversion_type_params": { + "anyOf": [ + { + "type": "object", 
+ "title": "ConversionTypeParams", + "properties": { + "base_measure": { "type": "object", - "title": "ColumnInfo", + "title": "MetricInputMeasure", "properties": { "name": { "type": "string" }, - "description": { - "type": "string", - "default": "" - }, - "meta": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "data_type": { + "filter": { "anyOf": [ { - "type": "string" + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] }, { "type": "null" @@ -10165,63 +8643,10 @@ ], "default": null }, - "constraints": { - "type": "array", - "items": { - "type": "object", - "title": "ColumnLevelConstraint", - "properties": { - "type": { - "enum": [ - "check", - "not_null", - "unique", - "primary_key", - "foreign_key", - "custom" - ] - }, - "name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "expression": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "warn_unenforced": { - "type": "boolean", - "default": true - }, - "warn_unsupported": { - "type": "boolean", - "default": true - } - }, - "additionalProperties": false, - "required": [ - "type" - ] - } - }, - "quote": { + "alias": { "anyOf": [ { - "type": "boolean" + "type": "string" }, { "type": "null" @@ -10229,140 +8654,61 @@ ], "default": null }, - "tags": { - "type": "array", - "items": { - "type": "string" - } + "join_to_timespine": { + "type": "boolean", + "default": false }, - "_extra": { - "type": "object", - "propertyNames": { - "type": "string" - } - } - }, - "additionalProperties": true, - "required": [ - "name" - ] - }, - "propertyNames": { - 
"type": "string" - } - }, - "meta": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "group": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "docs": { - "type": "object", - "title": "Docs", - "properties": { - "show": { - "type": "boolean", - "default": true - }, - "node_color": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false - }, - "patch_path": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "build_path": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "deferred": { - "type": "boolean", - "default": false - }, - "unrendered_config": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "created_at": { - "type": "number" - }, - "config_call_dict": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "relation_name": { - "anyOf": [ - { - "type": "string" + "fill_nulls_with": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } }, - { - "type": "null" - } - ], - "default": null - }, - "raw_code": { - "type": "string", - "default": "" - }, - "language": { - "type": "string", - "default": "sql" - }, - "refs": { - "type": "array", - "items": { + "additionalProperties": false, + "required": [ + "name" + ] + }, + "conversion_measure": { "type": "object", - "title": "RefArgs", + "title": "MetricInputMeasure", "properties": { "name": { "type": "string" }, - "package": { + "filter": { "anyOf": [ { - "type": "string" + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + 
} + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] }, { "type": "null" @@ -10370,13 +8716,25 @@ ], "default": null }, - "version": { + "alias": { "anyOf": [ { "type": "string" }, { - "type": "number" + "type": "null" + } + ], + "default": null + }, + "join_to_timespine": { + "type": "boolean", + "default": false + }, + "fill_nulls_with": { + "anyOf": [ + { + "type": "integer" }, { "type": "null" @@ -10389,151 +8747,433 @@ "required": [ "name" ] - } - }, - "sources": { - "type": "array", - "items": { - "type": "array", - "items": { - "type": "string" - } - } - }, - "metrics": { - "type": "array", - "items": { - "type": "array", - "items": { - "type": "string" - } - } - }, - "depends_on": { - "type": "object", - "title": "DependsOn", - "properties": { - "macros": { - "type": "array", - "items": { - "type": "string" - } - }, - "nodes": { - "type": "array", - "items": { - "type": "string" + }, + "entity": { + "type": "string" + }, + "calculation": { + "enum": [ + "conversions", + "conversion_rate" + ], + "default": "conversion_rate" + }, + "window": { + "anyOf": [ + { + "type": "object", + "title": "MetricTimeWindow", + "properties": { + "count": { + "type": "integer" + }, + "granularity": { + "enum": [ + "day", + "week", + "month", + "quarter", + "year" + ] + } + }, + "additionalProperties": false, + "required": [ + "count", + "granularity" + ] + }, + { + "type": "null" } - } + ], + "default": null }, - "additionalProperties": false - }, - "compiled_path": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "compiled": { - "type": "boolean", - "default": false - }, - "compiled_code": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "extra_ctes_injected": { - "type": "boolean", - "default": false - }, - "extra_ctes": { - "type": "array", - "items": { - "type": "object", - "title": "InjectedCTE", - "properties": { - "id": { - "type": "string" + 
"constant_properties": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "object", + "title": "ConstantPropertyInput", + "properties": { + "base_property": { + "type": "string" + }, + "conversion_property": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "base_property", + "conversion_property" + ] + } }, - "sql": { - "type": "string" + { + "type": "null" } - }, - "additionalProperties": false, - "required": [ - "id", - "sql" - ] + ], + "default": null } }, - "_pre_injected_sql": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "contract": { + "additionalProperties": false, + "required": [ + "base_measure", + "conversion_measure", + "entity" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "filter": { + "anyOf": [ + { + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { "type": "object", - "title": "Contract", + "title": "WhereFilter", "properties": { - "enforced": { - "type": "boolean", - "default": false - }, - "alias_types": { - "type": "boolean", - "default": true - }, - "checksum": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null + "where_sql_template": { + "type": "string" } }, - "additionalProperties": false + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "metadata": { + "anyOf": [ + { + "type": "object", + "title": "SourceFileMetadata", + "properties": { + "repo_file_path": { + "type": "string" + }, + "file_slice": { + "type": "object", + "title": "FileSlice", + "properties": { + "filename": { + "type": "string" + }, + "content": { + "type": "string" + }, + "start_line_number": { + "type": "integer" + }, + "end_line_number": { 
+ "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "filename", + "content", + "start_line_number", + "end_line_number" + ] + } + }, + "additionalProperties": false, + "required": [ + "repo_file_path", + "file_slice" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "config": { + "type": "object", + "title": "MetricConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": true + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "created_at": { + "type": "number" + }, + 
"group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "description", + "label", + "type", + "type_params" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "groups": { + "type": "object", + "description": "The groups defined in the dbt project", + "additionalProperties": { + "type": "object", + "title": "Group", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "const": "group" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "owner": { + "type": "object", + "title": "Owner", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "email": { + "anyOf": [ + { + "type": "string" }, - "additionalProperties": false, - "required": [ - "database", - "schema", - "name", - "resource_type", - "package_name", - "path", - "original_file_path", - "unique_id", - "fqn", - "alias", - "checksum" - ] - }, + { + "type": "null" + } + ], + "default": null + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "owner" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "selectors": { + "type": "object", + "description": "The selectors defined in selectors.yml", + "propertyNames": { + "type": "string" + } + }, + "disabled": { + "description": "A mapping of the disabled nodes in the target", + "anyOf": [ + { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "anyOf": [ { "type": "object", - 
"title": "SingularTestNode", + "title": "Seed", "properties": { "database": { "anyOf": [ @@ -10552,7 +9192,7 @@ "type": "string" }, "resource_type": { - "const": "test" + "const": "seed" }, "package_name": { "type": "string" @@ -10594,7 +9234,7 @@ }, "config": { "type": "object", - "title": "TestConfig", + "title": "SeedConfig", "properties": { "_extra": { "type": "object", @@ -10602,14 +9242,172 @@ "type": "string" } }, - "enabled": { - "type": "boolean", - "default": true + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "seed" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + 
"type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } }, - "alias": { + "full_refresh": { "anyOf": [ { - "type": "string" + "type": "boolean" }, { "type": "null" @@ -10617,18 +9415,24 @@ ], "default": null }, - "schema": { + "unique_key": { "anyOf": [ { "type": "string" }, + { + "type": "array", + "items": { + "type": "string" + } + }, { "type": "null" } ], - "default": "dbt_test__audit" + "default": null }, - "database": { + "on_schema_change": { "anyOf": [ { "type": "string" @@ -10637,112 +9441,82 @@ "type": "null" } ], - "default": null + "default": "ignore" }, - "tags": { - "anyOf": [ - { - "type": "array", - "items": { - "type": "string" - } - }, - { - "type": "string" - } + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" ] }, - "meta": { + "grants": { "type": "object", "propertyNames": { "type": "string" } }, - "group": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "materialized": { - "type": "string", - "default": "test" - }, - "severity": { - "type": "string", - "default": "ERROR", - "pattern": "^([Ww][Aa][Rr][Nn]|[Ee][Rr][Rr][Oo][Rr])$" + "packages": { + "type": "array", + "items": { + "type": "string" + } }, - "store_failures": { - "anyOf": [ - { - "type": "boolean" + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true }, - { - "type": "null" + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } - ], - "default": null + }, + "additionalProperties": false }, - "store_failures_as": { - "anyOf": [ - { - "type": "string" + 
"contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false }, - { - "type": "null" + "alias_types": { + "type": "boolean", + "default": true } - ], - "default": null + }, + "additionalProperties": false }, - "where": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null + "delimiter": { + "type": "string", + "default": "," }, - "limit": { + "quote_columns": { "anyOf": [ { - "type": "integer" + "type": "boolean" }, { "type": "null" } ], "default": null - }, - "fail_calc": { - "type": "string", - "default": "count(*)" - }, - "warn_if": { - "type": "string", - "default": "!= 0" - }, - "error_if": { - "type": "string", - "default": "!= 0" } }, "additionalProperties": true }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, "tags": { "type": "array", "items": { @@ -10912,141 +9686,40 @@ "anyOf": [ { "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "build_path": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "deferred": { - "type": "boolean", - "default": false - }, - "unrendered_config": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "created_at": { - "type": "number" - }, - "config_call_dict": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "relation_name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "raw_code": { - "type": "string", - "default": "" - }, - "language": { - "type": "string", - "default": "sql" - }, - "refs": { - "type": "array", - "items": { - "type": "object", - "title": "RefArgs", - "properties": { - "name": { - "type": "string" - }, - "package": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "number" 
- }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "name" - ] - } - }, - "sources": { - "type": "array", - "items": { - "type": "array", - "items": { - "type": "string" + }, + { + "type": "null" } - } + ], + "default": null }, - "metrics": { - "type": "array", - "items": { - "type": "array", - "items": { + "build_path": { + "anyOf": [ + { "type": "string" + }, + { + "type": "null" } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" } }, - "depends_on": { + "created_at": { + "type": "number" + }, + "config_call_dict": { "type": "object", - "title": "DependsOn", - "properties": { - "macros": { - "type": "array", - "items": { - "type": "string" - } - }, - "nodes": { - "type": "array", - "items": { - "type": "string" - } - } - }, - "additionalProperties": false + "propertyNames": { + "type": "string" + } }, - "compiled_path": { + "relation_name": { "anyOf": [ { "type": "string" @@ -11057,11 +9730,11 @@ ], "default": null }, - "compiled": { - "type": "boolean", - "default": false + "raw_code": { + "type": "string", + "default": "" }, - "compiled_code": { + "root_path": { "anyOf": [ { "type": "string" @@ -11072,66 +9745,399 @@ ], "default": null }, - "extra_ctes_injected": { - "type": "boolean", - "default": false - }, - "extra_ctes": { - "type": "array", - "items": { - "type": "object", - "title": "InjectedCTE", - "properties": { - "id": { + "depends_on": { + "type": "object", + "title": "MacroDependsOn", + "properties": { + "macros": { + "type": "array", + "items": { "type": "string" + } + } + }, + "additionalProperties": false + }, + "defer_relation": { + "anyOf": [ + { + "type": "object", + "title": "DeferRelation", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "alias": { + "type": "string" + }, + "relation_name": { + "anyOf": [ + { + "type": 
"string" + }, + { + "type": "null" + } + ] + }, + "resource_type": { + "enum": [ + "model", + "analysis", + "test", + "snapshot", + "operation", + "seed", + "rpc", + "sql_operation", + "doc", + "source", + "macro", + "exposure", + "metric", + "group", + "saved_query", + "semantic_model", + "unit_test", + "fixture" + ] + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "config": { + "anyOf": [ + { + "type": "object", + "title": "NodeConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": 
true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + } + }, + "additionalProperties": true + }, + { + "type": "null" + } 
+ ] + } }, - "sql": { - "type": "string" - } - }, - "additionalProperties": false, - "required": [ - "id", - "sql" - ] - } - }, - "_pre_injected_sql": { - "anyOf": [ - { - "type": "string" + "additionalProperties": false, + "required": [ + "database", + "schema", + "alias", + "relation_name", + "resource_type", + "name", + "description", + "compiled_code", + "meta", + "tags", + "config" + ] }, { "type": "null" } ], "default": null - }, - "contract": { - "type": "object", - "title": "Contract", - "properties": { - "enforced": { - "type": "boolean", - "default": false - }, - "alias_types": { - "type": "boolean", - "default": true - }, - "checksum": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false } }, "additionalProperties": false, @@ -11151,7 +10157,7 @@ }, { "type": "object", - "title": "HookNode", + "title": "Analysis", "properties": { "database": { "anyOf": [ @@ -11170,7 +10176,7 @@ "type": "string" }, "resource_type": { - "const": "operation" + "const": "analysis" }, "package_name": { "type": "string" @@ -11480,12 +10486,6 @@ }, "additionalProperties": true }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, "tags": { "type": "array", "items": { @@ -11673,10 +10673,6 @@ ], "default": null }, - "deferred": { - "type": "boolean", - "default": false - }, "unrendered_config": { "type": "object", "propertyNames": { @@ -11875,17 +10871,6 @@ } }, "additionalProperties": false - }, - "index": { - "anyOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ], - "default": null } }, "additionalProperties": false, @@ -11905,7 +10890,7 @@ }, { "type": "object", - "title": "ModelNode", + "title": "SingularTest", "properties": { "database": { "anyOf": [ @@ -11924,7 +10909,7 @@ "type": "string" }, "resource_type": { - "const": "model" + "const": "test" }, "package_name": { "type": "string" @@ -11966,7 +10951,7 @@ }, "config": { "type": "object", - 
"title": "ModelConfig", + "title": "TestConfig", "properties": { "_extra": { "type": "object", @@ -11998,7 +10983,7 @@ "type": "null" } ], - "default": null + "default": "dbt_test__audit" }, "database": { "anyOf": [ @@ -12043,12 +11028,17 @@ }, "materialized": { "type": "string", - "default": "view" + "default": "test" }, - "incremental_strategy": { + "severity": { + "type": "string", + "default": "ERROR", + "pattern": "^([Ww][Aa][Rr][Nn]|[Ee][Rr][Rr][Oo][Rr])$" + }, + "store_failures": { "anyOf": [ { - "type": "string" + "type": "boolean" }, { "type": "null" @@ -12056,90 +11046,10 @@ ], "default": null }, - "persist_docs": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "post-hook": { - "type": "array", - "items": { - "type": "object", - "title": "Hook", - "properties": { - "sql": { - "type": "string" - }, - "transaction": { - "type": "boolean", - "default": true - }, - "index": { - "anyOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "sql" - ] - } - }, - "pre-hook": { - "type": "array", - "items": { - "type": "object", - "title": "Hook", - "properties": { - "sql": { - "type": "string" - }, - "transaction": { - "type": "boolean", - "default": true - }, - "index": { - "anyOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "sql" - ] - } - }, - "quoting": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "column_types": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "full_refresh": { + "store_failures_as": { "anyOf": [ { - "type": "boolean" + "type": "string" }, { "type": "null" @@ -12147,107 +11057,43 @@ ], "default": null }, - "unique_key": { + "where": { "anyOf": [ { "type": "string" }, - { - "type": "array", - "items": { - "type": "string" - } - }, { "type": "null" } ], "default": null }, - 
"on_schema_change": { + "limit": { "anyOf": [ { - "type": "string" + "type": "integer" }, { "type": "null" } ], - "default": "ignore" - }, - "on_configuration_change": { - "enum": [ - "apply", - "continue", - "fail" - ] - }, - "grants": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "packages": { - "type": "array", - "items": { - "type": "string" - } + "default": null }, - "docs": { - "type": "object", - "title": "Docs", - "properties": { - "show": { - "type": "boolean", - "default": true - }, - "node_color": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false + "fail_calc": { + "type": "string", + "default": "count(*)" }, - "contract": { - "type": "object", - "title": "ContractConfig", - "properties": { - "enforced": { - "type": "boolean", - "default": false - }, - "alias_types": { - "type": "boolean", - "default": true - } - }, - "additionalProperties": false + "warn_if": { + "type": "string", + "default": "!= 0" }, - "access": { - "enum": [ - "private", - "protected", - "public" - ], - "default": "protected" + "error_if": { + "type": "string", + "default": "!= 0" } }, "additionalProperties": true }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, "tags": { "type": "array", "items": { @@ -12435,10 +11281,6 @@ ], "default": null }, - "deferred": { - "type": "boolean", - "default": false - }, "unrendered_config": { "type": "object", "propertyNames": { @@ -12637,159 +11479,6 @@ } }, "additionalProperties": false - }, - "access": { - "enum": [ - "private", - "protected", - "public" - ], - "default": "protected" - }, - "constraints": { - "type": "array", - "items": { - "type": "object", - "title": "ModelLevelConstraint", - "properties": { - "type": { - "enum": [ - "check", - "not_null", - "unique", - "primary_key", - "foreign_key", - "custom" - ] - }, - "name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": 
"null" - } - ], - "default": null - }, - "expression": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "warn_unenforced": { - "type": "boolean", - "default": true - }, - "warn_unsupported": { - "type": "boolean", - "default": true - }, - "columns": { - "type": "array", - "items": { - "type": "string" - } - } - }, - "additionalProperties": false, - "required": [ - "type" - ] - } - }, - "version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "number" - }, - { - "type": "null" - } - ], - "default": null - }, - "latest_version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "number" - }, - { - "type": "null" - } - ], - "default": null - }, - "deprecation_date": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "defer_relation": { - "anyOf": [ - { - "type": "object", - "title": "DeferRelation", - "properties": { - "database": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - }, - "schema": { - "type": "string" - }, - "alias": { - "type": "string" - }, - "relation_name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - } - }, - "additionalProperties": false, - "required": [ - "database", - "schema", - "alias", - "relation_name" - ] - }, - { - "type": "null" - } - ], - "default": null } }, "additionalProperties": false, @@ -12809,7 +11498,7 @@ }, { "type": "object", - "title": "RPCNode", + "title": "HookNode", "properties": { "database": { "anyOf": [ @@ -12828,7 +11517,7 @@ "type": "string" }, "resource_type": { - "const": "rpc" + "const": "operation" }, "package_name": { "type": "string" @@ -13138,12 +11827,6 @@ }, "additionalProperties": true }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, "tags": { "type": "array", "items": { @@ -13331,10 +12014,6 @@ ], "default": null }, - "deferred": { - "type": "boolean", - "default": false - }, "unrendered_config": { 
"type": "object", "propertyNames": { @@ -13533,6 +12212,17 @@ } }, "additionalProperties": false + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null } }, "additionalProperties": false, @@ -13552,7 +12242,7 @@ }, { "type": "object", - "title": "SqlNode", + "title": "Model", "properties": { "database": { "anyOf": [ @@ -13571,7 +12261,7 @@ "type": "string" }, "resource_type": { - "const": "sql_operation" + "const": "model" }, "package_name": { "type": "string" @@ -13613,7 +12303,7 @@ }, "config": { "type": "object", - "title": "NodeConfig", + "title": "ModelConfig", "properties": { "_extra": { "type": "object", @@ -13877,16 +12567,18 @@ } }, "additionalProperties": false + }, + "access": { + "enum": [ + "private", + "protected", + "public" + ], + "default": "protected" } }, "additionalProperties": true }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, "tags": { "type": "array", "items": { @@ -14074,10 +12766,6 @@ ], "default": null }, - "deferred": { - "type": "boolean", - "default": false - }, "unrendered_config": { "type": "object", "propertyNames": { @@ -14245,326 +12933,63 @@ { "type": "string" }, - { - "type": "null" - } - ], - "default": null - }, - "contract": { - "type": "object", - "title": "Contract", - "properties": { - "enforced": { - "type": "boolean", - "default": false - }, - "alias_types": { - "type": "boolean", - "default": true - }, - "checksum": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false - } - }, - "additionalProperties": false, - "required": [ - "database", - "schema", - "name", - "resource_type", - "package_name", - "path", - "original_file_path", - "unique_id", - "fqn", - "alias", - "checksum" - ] - }, - { - "type": "object", - "title": "GenericTestNode", - "properties": { - "test_metadata": { - "type": "object", - "title": "TestMetadata", - "properties": { - 
"name": { - "type": "string" - }, - "kwargs": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "namespace": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "name" - ] - }, - "database": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - }, - "schema": { - "type": "string" - }, - "name": { - "type": "string" - }, - "resource_type": { - "const": "test" - }, - "package_name": { - "type": "string" - }, - "path": { - "type": "string" - }, - "original_file_path": { - "type": "string" - }, - "unique_id": { - "type": "string" - }, - "fqn": { - "type": "array", - "items": { - "type": "string" - } - }, - "alias": { - "type": "string" - }, - "checksum": { - "type": "object", - "title": "FileHash", - "properties": { - "name": { - "type": "string" - }, - "checksum": { - "type": "string" - } - }, - "additionalProperties": false, - "required": [ - "name", - "checksum" - ] - }, - "config": { - "type": "object", - "title": "TestConfig", - "properties": { - "_extra": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "enabled": { - "type": "boolean", - "default": true - }, - "alias": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "schema": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": "dbt_test__audit" - }, - "database": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "tags": { - "anyOf": [ - { - "type": "array", - "items": { - "type": "string" - } - }, - { - "type": "string" - } - ] - }, - "meta": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "group": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "materialized": { - "type": "string", - "default": "test" - }, - "severity": { - "type": "string", - 
"default": "ERROR", - "pattern": "^([Ww][Aa][Rr][Nn]|[Ee][Rr][Rr][Oo][Rr])$" - }, - "store_failures": { - "anyOf": [ - { - "type": "boolean" - }, - { - "type": "null" - } - ], - "default": null - }, - "store_failures_as": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "type": "object", + "title": "Contract", + "properties": { + "enforced": { + "type": "boolean", + "default": false }, - "where": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null + "alias_types": { + "type": "boolean", + "default": true }, - "limit": { + "checksum": { "anyOf": [ { - "type": "integer" + "type": "string" }, { "type": "null" } ], "default": null - }, - "fail_calc": { - "type": "string", - "default": "count(*)" - }, - "warn_if": { - "type": "string", - "default": "!= 0" - }, - "error_if": { - "type": "string", - "default": "!= 0" } }, - "additionalProperties": true + "additionalProperties": false }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } + "access": { + "enum": [ + "private", + "protected", + "public" + ], + "default": "protected" }, - "tags": { + "constraints": { "type": "array", "items": { - "type": "string" - } - }, - "description": { - "type": "string", - "default": "" - }, - "columns": { - "type": "object", - "additionalProperties": { "type": "object", - "title": "ColumnInfo", + "title": "ModelLevelConstraint", "properties": { - "name": { - "type": "string" - }, - "description": { - "type": "string", - "default": "" - }, - "meta": { - "type": "object", - "propertyNames": { - "type": "string" - } + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] }, - "data_type": { + "name": { "anyOf": [ { "type": "string" @@ -14575,63 +13000,10 @@ ], "default": null }, - "constraints": { - "type": "array", - "items": { - "type": "object", - 
"title": "ColumnLevelConstraint", - "properties": { - "type": { - "enum": [ - "check", - "not_null", - "unique", - "primary_key", - "foreign_key", - "custom" - ] - }, - "name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "expression": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "warn_unenforced": { - "type": "boolean", - "default": true - }, - "warn_unsupported": { - "type": "boolean", - "default": true - } - }, - "additionalProperties": false, - "required": [ - "type" - ] - } - }, - "quote": { + "expression": { "anyOf": [ { - "type": "boolean" + "type": "string" }, { "type": "null" @@ -14639,71 +13011,34 @@ ], "default": null }, - "tags": { + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "columns": { "type": "array", "items": { "type": "string" } - }, - "_extra": { - "type": "object", - "propertyNames": { - "type": "string" - } } }, - "additionalProperties": true, + "additionalProperties": false, "required": [ - "name" + "type" ] - }, - "propertyNames": { - "type": "string" - } - }, - "meta": { - "type": "object", - "propertyNames": { - "type": "string" } }, - "group": { + "version": { "anyOf": [ { "type": "string" }, { - "type": "null" - } - ], - "default": null - }, - "docs": { - "type": "object", - "title": "Docs", - "properties": { - "show": { - "type": "boolean", - "default": true - }, - "node_color": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false - }, - "patch_path": { - "anyOf": [ - { - "type": "string" + "type": "number" }, { "type": "null" @@ -14711,37 +13046,21 @@ ], "default": null }, - "build_path": { + "latest_version": { "anyOf": [ { "type": "string" }, + { + "type": "number" + }, { "type": "null" } ], "default": null }, - "deferred": { - "type": "boolean", - "default": 
false - }, - "unrendered_config": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "created_at": { - "type": "number" - }, - "config_call_dict": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "relation_name": { + "deprecation_date": { "anyOf": [ { "type": "string" @@ -14750,98 +13069,382 @@ "type": "null" } ], - "default": null - }, - "raw_code": { - "type": "string", - "default": "" - }, - "language": { - "type": "string", - "default": "sql" + "default": null }, - "refs": { - "type": "array", - "items": { - "type": "object", - "title": "RefArgs", - "properties": { - "name": { - "type": "string" - }, - "package": { - "anyOf": [ - { + "defer_relation": { + "anyOf": [ + { + "type": "object", + "title": "DeferRelation", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "alias": { + "type": "string" + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "resource_type": { + "enum": [ + "model", + "analysis", + "test", + "snapshot", + "operation", + "seed", + "rpc", + "sql_operation", + "doc", + "source", + "macro", + "exposure", + "metric", + "group", + "saved_query", + "semantic_model", + "unit_test", + "fixture" + ] + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { "type": "string" - }, - { - "type": "null" } - ], - "default": null + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "config": { + "anyOf": [ + { + "type": "object", + "title": "NodeConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + 
{ + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", 
+ "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + } + }, + "additionalProperties": true + }, + { + "type": "null" + } + ] + } }, - "version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "number" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "name" - ] - } - }, - "sources": { - "type": "array", - "items": { - "type": "array", - "items": { - "type": "string" - } - } - }, - "metrics": { - "type": "array", - "items": { - "type": "array", - "items": { - "type": "string" - } - } - }, - "depends_on": { - "type": "object", - "title": "DependsOn", - "properties": { - "macros": { - "type": "array", - "items": { - "type": "string" - } - }, - "nodes": { - "type": "array", - "items": { - "type": "string" - } - } - }, - "additionalProperties": false - }, - "compiled_path": { - "anyOf": [ - { - "type": "string" + "additionalProperties": false, + "required": [ + "database", + "schema", + "alias", + "relation_name", + "resource_type", + "name", + "description", + "compiled_code", + 
"meta", + "tags", + "config" + ] }, { "type": "null" @@ -14849,109 +13452,14 @@ ], "default": null }, - "compiled": { - "type": "boolean", - "default": false - }, - "compiled_code": { + "primary_key": { "anyOf": [ { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "extra_ctes_injected": { - "type": "boolean", - "default": false - }, - "extra_ctes": { - "type": "array", - "items": { - "type": "object", - "title": "InjectedCTE", - "properties": { - "id": { - "type": "string" - }, - "sql": { + "type": "array", + "items": { "type": "string" } }, - "additionalProperties": false, - "required": [ - "id", - "sql" - ] - } - }, - "_pre_injected_sql": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "contract": { - "type": "object", - "title": "Contract", - "properties": { - "enforced": { - "type": "boolean", - "default": false - }, - "alias_types": { - "type": "boolean", - "default": true - }, - "checksum": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false - }, - "column_name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "file_key_name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "attached_node": { - "anyOf": [ - { - "type": "string" - }, { "type": "null" } @@ -14961,7 +13469,6 @@ }, "additionalProperties": false, "required": [ - "test_metadata", "database", "schema", "name", @@ -14977,7 +13484,7 @@ }, { "type": "object", - "title": "SnapshotNode", + "title": "SqlOperation", "properties": { "database": { "anyOf": [ @@ -14996,7 +13503,7 @@ "type": "string" }, "resource_type": { - "const": "snapshot" + "const": "sql_operation" }, "package_name": { "type": "string" @@ -15038,7 +13545,7 @@ }, "config": { "type": "object", - "title": "SnapshotConfig", + "title": "NodeConfig", "properties": { "_extra": { "type": 
"object", @@ -15115,7 +13622,7 @@ }, "materialized": { "type": "string", - "default": "snapshot" + "default": "view" }, "incremental_strategy": { "anyOf": [ @@ -15224,6 +13731,12 @@ { "type": "string" }, + { + "type": "array", + "items": { + "type": "string" + } + }, { "type": "null" } @@ -15288,85 +13801,18 @@ "properties": { "enforced": { "type": "boolean", - "default": false - }, - "alias_types": { - "type": "boolean", - "default": true - } - }, - "additionalProperties": false - }, - "strategy": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "target_schema": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "target_database": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "updated_at": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - }, - "check_cols": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "array", - "items": { - "type": "string" - } + "default": false }, - { - "type": "null" + "alias_types": { + "type": "boolean", + "default": true } - ], - "default": null + }, + "additionalProperties": false } }, "additionalProperties": true }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, "tags": { "type": "array", "items": { @@ -15554,10 +14000,6 @@ ], "default": null }, - "deferred": { - "type": "boolean", - "default": false - }, "unrendered_config": { "type": "object", "propertyNames": { @@ -15756,53 +14198,6 @@ } }, "additionalProperties": false - }, - "defer_relation": { - "anyOf": [ - { - "type": "object", - "title": "DeferRelation", - "properties": { - "database": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - }, - "schema": { - "type": "string" - }, - "alias": { - "type": "string" - }, - "relation_name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - } - }, - 
"additionalProperties": false, - "required": [ - "database", - "schema", - "alias", - "relation_name" - ] - }, - { - "type": "null" - } - ], - "default": null } }, "additionalProperties": false, @@ -15817,13 +14212,12 @@ "unique_id", "fqn", "alias", - "checksum", - "config" + "checksum" ] }, { "type": "object", - "title": "UnitTestNode", + "title": "GenericTest", "properties": { "database": { "anyOf": [ @@ -15842,7 +14236,7 @@ "type": "string" }, "resource_type": { - "const": "unit_test" + "const": "test" }, "package_name": { "type": "string" @@ -15884,7 +14278,7 @@ }, "config": { "type": "object", - "title": "UnitTestNodeConfig", + "title": "TestConfig", "properties": { "_extra": { "type": "object", @@ -15916,7 +14310,7 @@ "type": "null" } ], - "default": null + "default": "dbt_test__audit" }, "database": { "anyOf": [ @@ -15961,12 +14355,17 @@ }, "materialized": { "type": "string", - "default": "view" + "default": "test" }, - "incremental_strategy": { + "severity": { + "type": "string", + "default": "ERROR", + "pattern": "^([Ww][Aa][Rr][Nn]|[Ee][Rr][Rr][Oo][Rr])$" + }, + "store_failures": { "anyOf": [ { - "type": "string" + "type": "boolean" }, { "type": "null" @@ -15974,90 +14373,10 @@ ], "default": null }, - "persist_docs": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "post-hook": { - "type": "array", - "items": { - "type": "object", - "title": "Hook", - "properties": { - "sql": { - "type": "string" - }, - "transaction": { - "type": "boolean", - "default": true - }, - "index": { - "anyOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false, - "required": [ - "sql" - ] - } - }, - "pre-hook": { - "type": "array", - "items": { - "type": "object", - "title": "Hook", - "properties": { - "sql": { - "type": "string" - }, - "transaction": { - "type": "boolean", - "default": true - }, - "index": { - "anyOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ], - 
"default": null - } - }, - "additionalProperties": false, - "required": [ - "sql" - ] - } - }, - "quoting": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "column_types": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "full_refresh": { + "store_failures_as": { "anyOf": [ { - "type": "boolean" + "type": "string" }, { "type": "null" @@ -16065,107 +14384,42 @@ ], "default": null }, - "unique_key": { + "where": { "anyOf": [ { "type": "string" }, - { - "type": "array", - "items": { - "type": "string" - } - }, { "type": "null" } ], "default": null }, - "on_schema_change": { + "limit": { "anyOf": [ { - "type": "string" + "type": "integer" }, { "type": "null" } ], - "default": "ignore" - }, - "on_configuration_change": { - "enum": [ - "apply", - "continue", - "fail" - ] - }, - "grants": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "packages": { - "type": "array", - "items": { - "type": "string" - } + "default": null }, - "docs": { - "type": "object", - "title": "Docs", - "properties": { - "show": { - "type": "boolean", - "default": true - }, - "node_color": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null - } - }, - "additionalProperties": false + "fail_calc": { + "type": "string", + "default": "count(*)" }, - "contract": { - "type": "object", - "title": "ContractConfig", - "properties": { - "enforced": { - "type": "boolean", - "default": false - }, - "alias_types": { - "type": "boolean", - "default": true - } - }, - "additionalProperties": false + "warn_if": { + "type": "string", + "default": "!= 0" }, - "expected_rows": { - "type": "array", - "items": { - "type": "object", - "propertyNames": { - "type": "string" - } - } + "error_if": { + "type": "string", + "default": "!= 0" } }, - "additionalProperties": true - }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } + "additionalProperties": true }, "tags": { "type": 
"array", @@ -16354,10 +14608,6 @@ ], "default": null }, - "deferred": { - "type": "boolean", - "default": false - }, "unrendered_config": { "type": "object", "propertyNames": { @@ -16557,7 +14807,7 @@ }, "additionalProperties": false }, - "tested_node_unique_id": { + "column_name": { "anyOf": [ { "type": "string" @@ -16568,7 +14818,7 @@ ], "default": null }, - "this_input_node_unique_id": { + "file_key_name": { "anyOf": [ { "type": "string" @@ -16579,38 +14829,44 @@ ], "default": null }, - "overrides": { + "attached_node": { "anyOf": [ { - "type": "object", - "title": "UnitTestOverrides", - "properties": { - "macros": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "vars": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "env_vars": { - "type": "object", - "propertyNames": { - "type": "string" - } - } - }, - "additionalProperties": false + "type": "string" }, { "type": "null" } ], "default": null + }, + "test_metadata": { + "type": "object", + "title": "TestMetadata", + "properties": { + "name": { + "type": "string", + "default": "test" + }, + "kwargs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "namespace": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false } }, "additionalProperties": false, @@ -16630,7 +14886,7 @@ }, { "type": "object", - "title": "SeedNode", + "title": "Snapshot", "properties": { "database": { "anyOf": [ @@ -16649,7 +14905,7 @@ "type": "string" }, "resource_type": { - "const": "seed" + "const": "snapshot" }, "package_name": { "type": "string" @@ -16691,7 +14947,7 @@ }, "config": { "type": "object", - "title": "SeedConfig", + "title": "SnapshotConfig", "properties": { "_extra": { "type": "object", @@ -16768,7 +15024,7 @@ }, "materialized": { "type": "string", - "default": "seed" + "default": "snapshot" }, "incremental_strategy": { "anyOf": [ @@ -16877,12 +15133,6 @@ { "type": "string" }, 
- { - "type": "array", - "items": { - "type": "string" - } - }, { "type": "null" } @@ -16956,14 +15206,60 @@ }, "additionalProperties": false }, - "delimiter": { - "type": "string", - "default": "," + "strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, - "quote_columns": { + "target_schema": { "anyOf": [ { - "type": "boolean" + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "target_database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "updated_at": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "check_cols": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } }, { "type": "null" @@ -16974,12 +15270,6 @@ }, "additionalProperties": true }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, "tags": { "type": "array", "items": { @@ -17156,7 +15446,130 @@ ], "default": null }, - "build_path": { + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": 
"null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "compiled_path": { "anyOf": [ { "type": "string" @@ -17167,26 +15580,11 @@ ], "default": null }, - "deferred": { + "compiled": { "type": "boolean", "default": false }, - "unrendered_config": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "created_at": { - "type": "number" - }, - "config_call_dict": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, - "relation_name": { + "compiled_code": { "anyOf": [ { "type": "string" @@ -17197,11 +15595,31 @@ ], "default": null }, - "raw_code": { - "type": "string", - "default": "" + "extra_ctes_injected": { + "type": "boolean", + "default": false }, - "root_path": { + "extra_ctes": { + "type": "array", + "items": { + "type": "object", + "title": "InjectedCTE", + "properties": { + "id": { + "type": "string" + }, + "sql": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "id", + "sql" + ] + } + }, + "_pre_injected_sql": { "anyOf": [ { "type": "string" @@ -17212,15 +15630,28 @@ ], "default": null }, - "depends_on": { + "contract": { "type": "object", - "title": "MacroDependsOn", + "title": "Contract", "properties": { - "macros": { - "type": "array", - "items": { - "type": "string" - } + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + }, + "checksum": { + "anyOf": [ + { + "type": "string" + }, 
+ { + "type": "null" + } + ], + "default": null } }, "additionalProperties": false @@ -17256,6 +15687,333 @@ "type": "null" } ] + }, + "resource_type": { + "enum": [ + "model", + "analysis", + "test", + "snapshot", + "operation", + "seed", + "rpc", + "sql_operation", + "doc", + "source", + "macro", + "exposure", + "metric", + "group", + "saved_query", + "semantic_model", + "unit_test", + "fixture" + ] + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "config": { + "anyOf": [ + { + "type": "object", + "title": "NodeConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + 
"sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + 
"additionalProperties": false + } + }, + "additionalProperties": true + }, + { + "type": "null" + } + ] } }, "additionalProperties": false, @@ -17263,7 +16021,14 @@ "database", "schema", "alias", - "relation_name" + "relation_name", + "resource_type", + "name", + "description", + "compiled_code", + "meta", + "tags", + "config" ] }, { @@ -17285,7 +16050,8 @@ "unique_id", "fqn", "alias", - "checksum" + "checksum", + "config" ] }, { @@ -17341,12 +16107,6 @@ "identifier": { "type": "string" }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, "quoting": { "type": "object", "title": "Quoting", @@ -19038,6 +17798,12 @@ } ], "default": null + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } } }, "additionalProperties": true @@ -19300,6 +18066,17 @@ } ], "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } }, "additionalProperties": false, @@ -19315,12 +18092,6 @@ ] } }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, "description": { "anyOf": [ { @@ -19445,6 +18216,17 @@ } ], "default": null + }, + "cache": { + "type": "object", + "title": "SavedQueryCache", + "properties": { + "enabled": { + "type": "boolean", + "default": false + } + }, + "additionalProperties": false } }, "additionalProperties": true @@ -19625,7 +18407,7 @@ "type": "null" } ], - "default": null + "default": "" } }, "additionalProperties": false, @@ -20292,7 +19074,8 @@ "format": { "enum": [ "csv", - "dict" + "dict", + "sql" ], "default": "dict" }, @@ -20341,7 +19124,8 @@ "format": { "enum": [ "csv", - "dict" + "dict", + "sql" ], "default": "dict" }, @@ -20402,12 +19186,6 @@ "type": "string" } }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, "description": { "type": "string", "default": "" @@ -20517,6 +19295,80 @@ } ], "default": null + }, + "created_at": { + "type": "number" 
+ }, + "versions": { + "anyOf": [ + { + "type": "object", + "title": "UnitTestNodeVersions", + "properties": { + "include": { + "anyOf": [ + { + "type": "array", + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + } + ] + } + }, + { + "type": "null" + } + ], + "default": null + }, + "exclude": { + "anyOf": [ + { + "type": "array", + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + } + ] + } + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null } }, "additionalProperties": false, @@ -20752,6 +19604,17 @@ } ], "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } }, "additionalProperties": false, @@ -20767,12 +19630,6 @@ ] } }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, "description": { "anyOf": [ { @@ -20897,6 +19754,17 @@ } ], "default": null + }, + "cache": { + "type": "object", + "title": "SavedQueryCache", + "properties": { + "enabled": { + "type": "boolean", + "default": false + } + }, + "additionalProperties": false } }, "additionalProperties": true @@ -21084,7 +19952,7 @@ "type": "null" } ], - "default": null + "default": "" } }, "additionalProperties": false, @@ -21758,7 +20626,8 @@ "format": { "enum": [ "csv", - "dict" + "dict", + "sql" ], "default": "dict" }, @@ -21807,7 +20676,8 @@ "format": { "enum": [ "csv", - "dict" + "dict", + "sql" ], "default": "dict" }, @@ -21868,12 +20738,6 @@ "type": "string" } }, - "_event_status": { - "type": "object", - "propertyNames": { - "type": "string" - } - }, "description": { "type": "string", "default": "" @@ -21983,6 +20847,80 @@ } ], "default": null + }, + "created_at": { + "type": "number" + }, + "versions": { + 
"anyOf": [ + { + "type": "object", + "title": "UnitTestNodeVersions", + "properties": { + "include": { + "anyOf": [ + { + "type": "array", + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + } + ] + } + }, + { + "type": "null" + } + ], + "default": null + }, + "exclude": { + "anyOf": [ + { + "type": "array", + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + } + ] + } + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null } }, "additionalProperties": false, diff --git a/scripts/update_dev_packages.sh b/scripts/update_dev_packages.sh new file mode 100755 index 00000000000..3f140f04b89 --- /dev/null +++ b/scripts/update_dev_packages.sh @@ -0,0 +1,14 @@ +#!/bin/bash -e +set -e + +repo=$1 +ref=$2 +target_req_file="dev-requirements.txt" + +req_sed_pattern="s|${repo}.git@main|${repo}.git@${ref}|g" +if [[ "$OSTYPE" == darwin* ]]; then + # mac ships with a different version of sed that requires a delimiter arg + sed -i "" "$req_sed_pattern" $target_req_file +else + sed -i "$req_sed_pattern" $target_req_file +fi diff --git a/tests/fixtures/jaffle_shop.py b/tests/fixtures/jaffle_shop.py index 5ac17c81e1d..9b366ed2d5a 100644 --- a/tests/fixtures/jaffle_shop.py +++ b/tests/fixtures/jaffle_shop.py @@ -1,5 +1,7 @@ -import pytest import os + +import pytest + from dbt.tests.util import read_file # models/customers.sql diff --git a/tests/functional/access/test_access.py b/tests/functional/access/test_access.py index 8fa4f791d0f..5ef10c0e100 100644 --- a/tests/functional/access/test_access.py +++ b/tests/functional/access/test_access.py @@ -1,10 +1,10 @@ import pytest +from dbt.exceptions import DbtReferenceError, InvalidAccessTypeError +from dbt.node_types import AccessType from 
dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import get_manifest, rm_file, run_dbt, write_file from tests.fixtures.dbt_integration_project import dbt_integration_project # noqa: F401 -from dbt.tests.util import run_dbt, get_manifest, write_file, rm_file -from dbt.node_types import AccessType -from dbt.exceptions import InvalidAccessTypeError, DbtReferenceError my_model_sql = "select 1 as fun" diff --git a/tests/functional/adapter/aliases/test_aliases.py b/tests/functional/adapter/aliases/test_aliases.py index d5f5b37152e..3e35518f06c 100644 --- a/tests/functional/adapter/aliases/test_aliases.py +++ b/tests/functional/adapter/aliases/test_aliases.py @@ -1,22 +1,23 @@ import pytest + from dbt.tests.util import run_dbt from tests.functional.adapter.aliases.fixtures import ( MACROS__CAST_SQL, MACROS__EXPECT_VALUE_SQL, - MODELS__SCHEMA_YML, - MODELS__FOO_ALIAS_SQL, MODELS__ALIAS_IN_PROJECT_SQL, MODELS__ALIAS_IN_PROJECT_WITH_OVERRIDE_SQL, + MODELS__FOO_ALIAS_SQL, MODELS__REF_FOO_ALIAS_SQL, + MODELS__SCHEMA_YML, MODELS_DUPE__MODEL_A_SQL, MODELS_DUPE__MODEL_B_SQL, - MODELS_DUPE_CUSTOM_SCHEMA__SCHEMA_YML, + MODELS_DUPE_CUSTOM_DATABASE__MODEL_A_SQL, + MODELS_DUPE_CUSTOM_DATABASE__MODEL_B_SQL, + MODELS_DUPE_CUSTOM_DATABASE__SCHEMA_YML, MODELS_DUPE_CUSTOM_SCHEMA__MODEL_A_SQL, MODELS_DUPE_CUSTOM_SCHEMA__MODEL_B_SQL, MODELS_DUPE_CUSTOM_SCHEMA__MODEL_C_SQL, - MODELS_DUPE_CUSTOM_DATABASE__SCHEMA_YML, - MODELS_DUPE_CUSTOM_DATABASE__MODEL_A_SQL, - MODELS_DUPE_CUSTOM_DATABASE__MODEL_B_SQL, + MODELS_DUPE_CUSTOM_SCHEMA__SCHEMA_YML, ) diff --git a/tests/functional/adapter/basic/test_adapter_methods.py b/tests/functional/adapter/basic/test_adapter_methods.py index aced1b21682..51d987f07de 100644 --- a/tests/functional/adapter/basic/test_adapter_methods.py +++ b/tests/functional/adapter/basic/test_adapter_methods.py @@ -1,8 +1,7 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal from dbt.tests.fixtures.project import 
write_project_files - +from dbt.tests.util import check_relations_equal, run_dbt tests__get_columns_in_relation_sql = """ {% set columns = adapter.get_columns_in_relation(ref('model')) %} diff --git a/tests/functional/adapter/basic/test_base.py b/tests/functional/adapter/basic/test_base.py index 64edd03872e..98edd9926cc 100644 --- a/tests/functional/adapter/basic/test_base.py +++ b/tests/functional/adapter/basic/test_base.py @@ -1,17 +1,18 @@ import pytest + from dbt.tests.util import ( - run_dbt, - check_result_nodes_by_name, - relation_from_name, check_relation_types, check_relations_equal, + check_result_nodes_by_name, + relation_from_name, + run_dbt, ) from tests.functional.adapter.basic.files import ( - seeds_base_csv, - base_view_sql, - base_table_sql, base_materialized_var_sql, + base_table_sql, + base_view_sql, schema_base_yml, + seeds_base_csv, ) diff --git a/tests/functional/adapter/basic/test_docs_generate.py b/tests/functional/adapter/basic/test_docs_generate.py index 9b849160564..c6c070bf2a3 100644 --- a/tests/functional/adapter/basic/test_docs_generate.py +++ b/tests/functional/adapter/basic/test_docs_generate.py @@ -1,14 +1,15 @@ -import pytest import os from datetime import datetime -import dbt -from dbt.tests.util import run_dbt, rm_file, get_artifact, check_datetime_between +import pytest + +import dbt from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import check_datetime_between, get_artifact, rm_file, run_dbt from tests.functional.adapter.basic.expected_catalog import ( base_expected_catalog, - no_stats, expected_references_catalog, + no_stats, ) models__schema_yml = """ diff --git a/tests/functional/adapter/basic/test_empty.py b/tests/functional/adapter/basic/test_empty.py index 8bfe928686f..49941ae6f67 100644 --- a/tests/functional/adapter/basic/test_empty.py +++ b/tests/functional/adapter/basic/test_empty.py @@ -1,6 +1,7 @@ -from dbt.tests.util import run_dbt import os +from dbt.tests.util import run_dbt + class 
BaseEmpty: def test_empty(self, project): diff --git a/tests/functional/adapter/basic/test_ephemeral.py b/tests/functional/adapter/basic/test_ephemeral.py index 65a9e5af727..9b330b896cd 100644 --- a/tests/functional/adapter/basic/test_ephemeral.py +++ b/tests/functional/adapter/basic/test_ephemeral.py @@ -1,18 +1,20 @@ -import pytest import os + +import pytest + from dbt.tests.util import ( - run_dbt, - get_manifest, check_relations_equal, check_result_nodes_by_name, + get_manifest, relation_from_name, + run_dbt, ) from tests.functional.adapter.basic.files import ( - seeds_base_csv, base_ephemeral_sql, - ephemeral_view_sql, ephemeral_table_sql, + ephemeral_view_sql, schema_base_yml, + seeds_base_csv, ) diff --git a/tests/functional/adapter/basic/test_generic_tests.py b/tests/functional/adapter/basic/test_generic_tests.py index 0c71a197bb0..12c564609ab 100644 --- a/tests/functional/adapter/basic/test_generic_tests.py +++ b/tests/functional/adapter/basic/test_generic_tests.py @@ -1,13 +1,14 @@ import pytest + from dbt.tests.util import run_dbt from tests.functional.adapter.basic.files import ( - seeds_base_csv, - generic_test_seed_yml, - base_view_sql, base_table_sql, - schema_base_yml, - generic_test_view_yml, + base_view_sql, + generic_test_seed_yml, generic_test_table_yml, + generic_test_view_yml, + schema_base_yml, + seeds_base_csv, ) diff --git a/tests/functional/adapter/basic/test_incremental.py b/tests/functional/adapter/basic/test_incremental.py index 4d94ff17d2d..11eadddbb3f 100644 --- a/tests/functional/adapter/basic/test_incremental.py +++ b/tests/functional/adapter/basic/test_incremental.py @@ -1,12 +1,13 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal, relation_from_name + from dbt.artifacts.schemas.results import RunStatus +from dbt.tests.util import check_relations_equal, relation_from_name, run_dbt from tests.functional.adapter.basic.files import ( - seeds_base_csv, - seeds_added_csv, - schema_base_yml, - incremental_sql, 
incremental_not_schema_change_sql, + incremental_sql, + schema_base_yml, + seeds_added_csv, + seeds_base_csv, ) diff --git a/tests/functional/adapter/basic/test_singular_tests.py b/tests/functional/adapter/basic/test_singular_tests.py index 3f9071cc56e..5f72adc0588 100644 --- a/tests/functional/adapter/basic/test_singular_tests.py +++ b/tests/functional/adapter/basic/test_singular_tests.py @@ -1,9 +1,7 @@ import pytest -from tests.functional.adapter.basic.files import ( - test_passing_sql, - test_failing_sql, -) + from dbt.tests.util import check_result_nodes_by_name, run_dbt +from tests.functional.adapter.basic.files import test_failing_sql, test_passing_sql class BaseSingularTests: diff --git a/tests/functional/adapter/basic/test_singular_tests_ephemeral.py b/tests/functional/adapter/basic/test_singular_tests_ephemeral.py index 6e2315d5fbd..43f693ab69c 100644 --- a/tests/functional/adapter/basic/test_singular_tests_ephemeral.py +++ b/tests/functional/adapter/basic/test_singular_tests_ephemeral.py @@ -1,12 +1,12 @@ import pytest -from dbt.tests.util import run_dbt, check_result_nodes_by_name +from dbt.tests.util import check_result_nodes_by_name, run_dbt from tests.functional.adapter.basic.files import ( - seeds_base_csv, ephemeral_with_cte_sql, - test_ephemeral_passing_sql, - test_ephemeral_failing_sql, schema_base_yml, + seeds_base_csv, + test_ephemeral_failing_sql, + test_ephemeral_passing_sql, ) diff --git a/tests/functional/adapter/basic/test_snapshot_check_cols.py b/tests/functional/adapter/basic/test_snapshot_check_cols.py index 29eb35c3bea..18c2e9f5fd5 100644 --- a/tests/functional/adapter/basic/test_snapshot_check_cols.py +++ b/tests/functional/adapter/basic/test_snapshot_check_cols.py @@ -1,11 +1,12 @@ import pytest -from dbt.tests.util import run_dbt, update_rows, relation_from_name + +from dbt.tests.util import relation_from_name, run_dbt, update_rows from tests.functional.adapter.basic.files import ( - seeds_base_csv, - seeds_added_csv, 
cc_all_snapshot_sql, cc_date_snapshot_sql, cc_name_snapshot_sql, + seeds_added_csv, + seeds_base_csv, ) diff --git a/tests/functional/adapter/basic/test_snapshot_timestamp.py b/tests/functional/adapter/basic/test_snapshot_timestamp.py index 7c227084336..7211ce28663 100644 --- a/tests/functional/adapter/basic/test_snapshot_timestamp.py +++ b/tests/functional/adapter/basic/test_snapshot_timestamp.py @@ -1,9 +1,10 @@ import pytest -from dbt.tests.util import run_dbt, relation_from_name, update_rows + +from dbt.tests.util import relation_from_name, run_dbt, update_rows from tests.functional.adapter.basic.files import ( + seeds_added_csv, seeds_base_csv, seeds_newcolumns_csv, - seeds_added_csv, ts_snapshot_sql, ) diff --git a/tests/functional/adapter/basic/test_table_materialization.py b/tests/functional/adapter/basic/test_table_materialization.py index 279152d6985..ec07e4f2c0b 100644 --- a/tests/functional/adapter/basic/test_table_materialization.py +++ b/tests/functional/adapter/basic/test_table_materialization.py @@ -1,7 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal - +from dbt.tests.util import check_relations_equal, run_dbt seeds__seed_csv = """id,first_name,last_name,email,gender,ip_address 1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168 diff --git a/tests/functional/adapter/catalog/relation_types.py b/tests/functional/adapter/catalog/relation_types.py index bbb3bed0a72..a73972b534d 100644 --- a/tests/functional/adapter/catalog/relation_types.py +++ b/tests/functional/adapter/catalog/relation_types.py @@ -1,7 +1,7 @@ -from dbt.artifacts.schemas.catalog import CatalogArtifact -from dbt.tests.util import run_dbt import pytest +from dbt.artifacts.schemas.catalog import CatalogArtifact +from dbt.tests.util import run_dbt from tests.functional.adapter.catalog import files diff --git a/tests/functional/adapter/column_types/test_column_types.py b/tests/functional/adapter/column_types/test_column_types.py index fd783a08ddc..7e028b33c5b 
100644 --- a/tests/functional/adapter/column_types/test_column_types.py +++ b/tests/functional/adapter/column_types/test_column_types.py @@ -1,4 +1,5 @@ import pytest + from dbt.tests.util import run_dbt from tests.functional.adapter.column_types.fixtures import ( macro_test_is_type_sql, diff --git a/tests/functional/adapter/concurrency/test_concurrency.py b/tests/functional/adapter/concurrency/test_concurrency.py index 898deafa587..65932f95ea7 100644 --- a/tests/functional/adapter/concurrency/test_concurrency.py +++ b/tests/functional/adapter/concurrency/test_concurrency.py @@ -1,4 +1,5 @@ import pytest + from dbt.tests.util import ( check_relations_equal, check_table_does_not_exist, @@ -8,7 +9,6 @@ write_file, ) - models__invalid_sql = """ {{ config( diff --git a/tests/functional/adapter/constraints/test_constraints.py b/tests/functional/adapter/constraints/test_constraints.py index 894c451cda4..7fea5a7420f 100644 --- a/tests/functional/adapter/constraints/test_constraints.py +++ b/tests/functional/adapter/constraints/test_constraints.py @@ -1,43 +1,43 @@ -import pytest import re +import pytest + from dbt.tests.util import ( - run_dbt, get_manifest, - run_dbt_and_capture, - write_file, read_file, relation_from_name, + run_dbt, + run_dbt_and_capture, + write_file, ) - from tests.functional.adapter.constraints.fixtures import ( - my_model_sql, + constrained_model_schema_yml, + create_table_macro_sql, + foreign_key_model_sql, + incremental_foreign_key_model_raw_numbers_sql, + incremental_foreign_key_model_stg_numbers_sql, + incremental_foreign_key_schema_yml, + model_contract_header_schema_yml, + model_data_type_schema_yml, + model_fk_constraint_schema_yml, + model_quoted_column_schema_yml, + model_schema_yml, my_incremental_model_sql, - my_model_wrong_order_sql, - my_model_wrong_name_sql, + my_model_contract_sql_header_sql, my_model_data_type_sql, - model_data_type_schema_yml, - my_model_view_wrong_order_sql, - my_model_view_wrong_name_sql, - 
my_model_incremental_wrong_order_sql, + my_model_incremental_contract_sql_header_sql, + my_model_incremental_with_nulls_sql, my_model_incremental_wrong_name_sql, + my_model_incremental_wrong_order_depends_on_fk_sql, + my_model_incremental_wrong_order_sql, + my_model_sql, + my_model_view_wrong_name_sql, + my_model_view_wrong_order_sql, my_model_with_nulls_sql, - my_model_incremental_with_nulls_sql, my_model_with_quoted_column_name_sql, - model_schema_yml, - model_fk_constraint_schema_yml, - constrained_model_schema_yml, - model_quoted_column_schema_yml, - foreign_key_model_sql, + my_model_wrong_name_sql, my_model_wrong_order_depends_on_fk_sql, - my_model_incremental_wrong_order_depends_on_fk_sql, - my_model_contract_sql_header_sql, - my_model_incremental_contract_sql_header_sql, - model_contract_header_schema_yml, - create_table_macro_sql, - incremental_foreign_key_schema_yml, - incremental_foreign_key_model_raw_numbers_sql, - incremental_foreign_key_model_stg_numbers_sql, + my_model_wrong_order_sql, ) diff --git a/tests/functional/adapter/dbt_clone/test_dbt_clone.py b/tests/functional/adapter/dbt_clone/test_dbt_clone.py index a602b3cb7f7..9e5cb40a926 100644 --- a/tests/functional/adapter/dbt_clone/test_dbt_clone.py +++ b/tests/functional/adapter/dbt_clone/test_dbt_clone.py @@ -6,20 +6,20 @@ import pytest from dbt.exceptions import DbtRuntimeError +from dbt.tests.util import run_dbt, run_dbt_and_capture from tests.functional.adapter.dbt_clone.fixtures import ( - seed_csv, - table_model_sql, - view_model_sql, + custom_can_clone_tables_false_macros_sql, ephemeral_model_sql, exposures_yml, - schema_yml, - snapshot_sql, get_schema_name_sql, - macros_sql, infinite_macros_sql, - custom_can_clone_tables_false_macros_sql, + macros_sql, + schema_yml, + seed_csv, + snapshot_sql, + table_model_sql, + view_model_sql, ) -from dbt.tests.util import run_dbt, run_dbt_and_capture class BaseClone: @@ -85,17 +85,14 @@ def copy_state(self, project_root): def run_and_save_state(self, 
project_root, with_snapshot=False): results = run_dbt(["seed"]) assert len(results) == 1 - assert not any(r.node.deferred for r in results) results = run_dbt(["run"]) assert len(results) == 2 - assert not any(r.node.deferred for r in results) results = run_dbt(["test"]) assert len(results) == 2 if with_snapshot: results = run_dbt(["snapshot"]) assert len(results) == 1 - assert not any(r.node.deferred for r in results) # copy files self.copy_state(project_root) @@ -226,6 +223,7 @@ def test_clone_same_target_and_state(self, project, unique_schema, other_schema) clone_args = [ "clone", + "--defer", "--state", "target", ] diff --git a/tests/functional/adapter/dbt_debug/test_dbt_debug.py b/tests/functional/adapter/dbt_debug/test_dbt_debug.py index 3ad39e9c2ab..206c7598258 100644 --- a/tests/functional/adapter/dbt_debug/test_dbt_debug.py +++ b/tests/functional/adapter/dbt_debug/test_dbt_debug.py @@ -1,6 +1,7 @@ -import pytest import os import re + +import pytest import yaml from dbt.cli.exceptions import DbtUsageException diff --git a/tests/functional/adapter/dbt_show/test_dbt_show.py b/tests/functional/adapter/dbt_show/test_dbt_show.py index 4c5c1c18bac..08a494e031a 100644 --- a/tests/functional/adapter/dbt_show/test_dbt_show.py +++ b/tests/functional/adapter/dbt_show/test_dbt_show.py @@ -1,11 +1,11 @@ import pytest -from dbt.tests.util import run_dbt +from dbt.tests.util import run_dbt from tests.functional.adapter.dbt_show.fixtures import ( - models__sql_header, models__ephemeral_model, - models__second_ephemeral_model, models__sample_model, + models__second_ephemeral_model, + models__sql_header, seeds__sample_seed, ) diff --git a/tests/functional/adapter/ephemeral/test_ephemeral.py b/tests/functional/adapter/ephemeral/test_ephemeral.py index 22491f3b45d..665a12399a2 100644 --- a/tests/functional/adapter/ephemeral/test_ephemeral.py +++ b/tests/functional/adapter/ephemeral/test_ephemeral.py @@ -1,8 +1,9 @@ -import pytest -import re import os -from dbt.tests.util import 
run_dbt, check_relations_equal +import re + +import pytest +from dbt.tests.util import check_relations_equal, run_dbt models__dependent_sql = """ diff --git a/tests/functional/adapter/hooks/test_model_hooks.py b/tests/functional/adapter/hooks/test_model_hooks.py index 90ba298054a..1a4c01cb506 100644 --- a/tests/functional/adapter/hooks/test_model_hooks.py +++ b/tests/functional/adapter/hooks/test_model_hooks.py @@ -1,15 +1,10 @@ -import pytest - from pathlib import Path -from dbt_common.exceptions import CompilationError -from dbt.exceptions import ParsingError - -from dbt.tests.util import ( - run_dbt, - write_file, -) +import pytest +from dbt.exceptions import ParsingError +from dbt.tests.util import run_dbt, write_file +from dbt_common.exceptions import CompilationError from tests.functional.adapter.hooks.fixtures import ( models__hooked, models__hooks, diff --git a/tests/functional/adapter/hooks/test_run_hooks.py b/tests/functional/adapter/hooks/test_run_hooks.py index 6671e8cee19..f8bec5c6aeb 100644 --- a/tests/functional/adapter/hooks/test_run_hooks.py +++ b/tests/functional/adapter/hooks/test_run_hooks.py @@ -1,20 +1,16 @@ import os -import pytest - from pathlib import Path +import pytest + +from dbt.tests.util import check_table_does_not_exist, run_dbt from tests.functional.adapter.hooks.fixtures import ( - macros__hook, macros__before_and_after, - models__hooks, - seeds__example_seed_csv, + macros__hook, macros_missing_column, + models__hooks, models__missing_column, -) - -from dbt.tests.util import ( - check_table_does_not_exist, - run_dbt, + seeds__example_seed_csv, ) diff --git a/tests/functional/adapter/incremental/test_incremental_merge_exclude_columns.py b/tests/functional/adapter/incremental/test_incremental_merge_exclude_columns.py index db958f1eda4..7597865154c 100644 --- a/tests/functional/adapter/incremental/test_incremental_merge_exclude_columns.py +++ b/tests/functional/adapter/incremental/test_incremental_merge_exclude_columns.py @@ -1,7 +1,8 
@@ -import pytest -from dbt.tests.util import run_dbt, check_relations_equal from collections import namedtuple +import pytest + +from dbt.tests.util import check_relations_equal, run_dbt models__merge_exclude_columns_sql = """ {{ config( diff --git a/tests/functional/adapter/incremental/test_incremental_on_schema_change.py b/tests/functional/adapter/incremental/test_incremental_on_schema_change.py index f5d415de3cd..8182e35dd38 100644 --- a/tests/functional/adapter/incremental/test_incremental_on_schema_change.py +++ b/tests/functional/adapter/incremental/test_incremental_on_schema_change.py @@ -1,23 +1,19 @@ import pytest -from dbt.tests.util import ( - check_relations_equal, - run_dbt, -) - +from dbt.tests.util import check_relations_equal, run_dbt from tests.functional.adapter.incremental.fixtures import ( - _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY, + _MODELS__A, + _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS, + _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE, + _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE_TARGET, + _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_TARGET, + _MODELS__INCREMENTAL_FAIL, _MODELS__INCREMENTAL_IGNORE, - _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY_TARGET, _MODELS__INCREMENTAL_IGNORE_TARGET, - _MODELS__INCREMENTAL_FAIL, _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS, - _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE, - _MODELS__A, - _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_TARGET, - _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS, _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS_TARGET, - _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE_TARGET, + _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY, + _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY_TARGET, ) diff --git a/tests/functional/adapter/incremental/test_incremental_predicates.py b/tests/functional/adapter/incremental/test_incremental_predicates.py index 2060e9eb6d4..f6eaf67fb17 100644 --- a/tests/functional/adapter/incremental/test_incremental_predicates.py +++ b/tests/functional/adapter/incremental/test_incremental_predicates.py @@ -1,7 
+1,8 @@ -import pytest -from dbt.tests.util import run_dbt, check_relations_equal from collections import namedtuple +import pytest + +from dbt.tests.util import check_relations_equal, run_dbt models__delete_insert_incremental_predicates_sql = """ {{ config( diff --git a/tests/functional/adapter/incremental/test_incremental_unique_id.py b/tests/functional/adapter/incremental/test_incremental_unique_id.py index e08e332ad3e..194d35db56f 100644 --- a/tests/functional/adapter/incremental/test_incremental_unique_id.py +++ b/tests/functional/adapter/incremental/test_incremental_unique_id.py @@ -1,9 +1,11 @@ -import pytest -from dbt.tests.util import run_dbt, check_relations_equal -from dbt.artifacts.schemas.results import RunStatus from collections import namedtuple from pathlib import Path +import pytest + +from dbt.artifacts.schemas.results import RunStatus +from dbt.tests.util import check_relations_equal, run_dbt + models__trinary_unique_key_list_sql = """ -- a multi-argument unique key list should see overwriting on rows in the model -- where all unique key fields apply diff --git a/tests/functional/adapter/materialized_view/basic.py b/tests/functional/adapter/materialized_view/basic.py index 518522a4631..1d82977ae64 100644 --- a/tests/functional/adapter/materialized_view/basic.py +++ b/tests/functional/adapter/materialized_view/basic.py @@ -11,7 +11,6 @@ run_dbt_and_capture, set_model_file, ) - from tests.functional.adapter.materialized_view.files import ( MY_MATERIALIZED_VIEW, MY_SEED, diff --git a/tests/functional/adapter/materialized_view/changes.py b/tests/functional/adapter/materialized_view/changes.py index 243b1e34995..7005a5e0b26 100644 --- a/tests/functional/adapter/materialized_view/changes.py +++ b/tests/functional/adapter/materialized_view/changes.py @@ -3,7 +3,6 @@ import pytest from dbt.adapters.base.relation import BaseRelation -from dbt_common.contracts.config.materialization import OnConfigurationChangeOption from dbt.adapters.contracts.relation 
import RelationType from dbt.tests.util import ( assert_message_in_logs, @@ -12,7 +11,7 @@ run_dbt_and_capture, set_model_file, ) - +from dbt_common.contracts.config.materialization import OnConfigurationChangeOption from tests.functional.adapter.materialized_view.files import ( MY_MATERIALIZED_VIEW, MY_SEED, diff --git a/tests/functional/adapter/persist_docs/test_persist_docs.py b/tests/functional/adapter/persist_docs/test_persist_docs.py index aa311eba9f2..3f4521e69e5 100644 --- a/tests/functional/adapter/persist_docs/test_persist_docs.py +++ b/tests/functional/adapter/persist_docs/test_persist_docs.py @@ -1,9 +1,9 @@ import json import os + import pytest from dbt.tests.util import run_dbt - from tests.functional.adapter.persist_docs.fixtures import ( _DOCS__MY_FUN_DOCS, _MODELS__MISSING_COLUMN, @@ -11,8 +11,8 @@ _MODELS__NO_DOCS_MODEL, _MODELS__TABLE, _MODELS__VIEW, - _PROPERTIES__QUOTE_MODEL, _PROPERITES__SCHEMA_MISSING_COL, + _PROPERTIES__QUOTE_MODEL, _PROPERTIES__SCHEMA_YML, _SEEDS__SEED, ) diff --git a/tests/functional/adapter/python_model/test_python_model.py b/tests/functional/adapter/python_model/test_python_model.py index 259895abde9..bba537bb2fb 100644 --- a/tests/functional/adapter/python_model/test_python_model.py +++ b/tests/functional/adapter/python_model/test_python_model.py @@ -1,6 +1,8 @@ -import pytest import os + +import pytest import yaml + from dbt.tests.util import run_dbt basic_sql = """ diff --git a/tests/functional/adapter/python_model/test_spark.py b/tests/functional/adapter/python_model/test_spark.py index 7e105a957d3..2c5148e84fd 100644 --- a/tests/functional/adapter/python_model/test_spark.py +++ b/tests/functional/adapter/python_model/test_spark.py @@ -1,4 +1,5 @@ import pytest + from dbt.tests.util import run_dbt PANDAS_MODEL = """ diff --git a/tests/functional/adapter/query_comment/test_query_comment.py b/tests/functional/adapter/query_comment/test_query_comment.py index 5651e54b39b..32dba5a1695 100644 --- 
a/tests/functional/adapter/query_comment/test_query_comment.py +++ b/tests/functional/adapter/query_comment/test_query_comment.py @@ -1,8 +1,13 @@ -import pytest import json + +import pytest + from dbt.exceptions import DbtRuntimeError from dbt.tests.util import run_dbt_and_capture -from tests.functional.adapter.query_comment.fixtures import MACROS__MACRO_SQL, MODELS__X_SQL +from tests.functional.adapter.query_comment.fixtures import ( + MACROS__MACRO_SQL, + MODELS__X_SQL, +) class BaseDefaultQueryComments: diff --git a/tests/functional/adapter/relations/test_changing_relation_type.py b/tests/functional/adapter/relations/test_changing_relation_type.py index 2eeb5aea64d..a9b719f7d92 100644 --- a/tests/functional/adapter/relations/test_changing_relation_type.py +++ b/tests/functional/adapter/relations/test_changing_relation_type.py @@ -1,9 +1,9 @@ from typing import List, Optional + import pytest from dbt.tests.util import run_dbt - _DEFAULT_CHANGE_RELATION_TYPE_MODEL = """ {{ config(materialized=var('materialized')) }} diff --git a/tests/functional/adapter/relations/test_dropping_schema_named.py b/tests/functional/adapter/relations/test_dropping_schema_named.py index 2999d43fec6..d626ba46992 100644 --- a/tests/functional/adapter/relations/test_dropping_schema_named.py +++ b/tests/functional/adapter/relations/test_dropping_schema_named.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, get_connection +from dbt.tests.util import get_connection, run_dbt class BaseDropSchemaNamed: diff --git a/tests/functional/adapter/simple_copy/test_copy_uppercase.py b/tests/functional/adapter/simple_copy/test_copy_uppercase.py index 277f592fb0e..931f485924f 100644 --- a/tests/functional/adapter/simple_copy/test_copy_uppercase.py +++ b/tests/functional/adapter/simple_copy/test_copy_uppercase.py @@ -1,18 +1,18 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal +from dbt.tests.util import check_relations_equal, run_dbt from 
tests.functional.adapter.simple_copy.fixtures import ( - _PROPERTIES__SCHEMA_YML, - _SEEDS__SEED_INITIAL, _MODELS__ADVANCED_INCREMENTAL, _MODELS__COMPOUND_SORT, _MODELS__DISABLED, _MODELS__EMPTY, - _MODELS_GET_AND_REF_UPPERCASE, _MODELS__INCREMENTAL, _MODELS__INTERLEAVED_SORT, _MODELS__MATERIALIZED, _MODELS__VIEW_MODEL, + _MODELS_GET_AND_REF_UPPERCASE, + _PROPERTIES__SCHEMA_YML, + _SEEDS__SEED_INITIAL, ) diff --git a/tests/functional/adapter/simple_copy/test_simple_copy.py b/tests/functional/adapter/simple_copy/test_simple_copy.py index 77494b6b679..c86e7f4b01a 100644 --- a/tests/functional/adapter/simple_copy/test_simple_copy.py +++ b/tests/functional/adapter/simple_copy/test_simple_copy.py @@ -1,16 +1,12 @@ # mix in biguery # mix in snowflake -import pytest - from pathlib import Path -from dbt.tests.util import run_dbt, rm_file, write_file, check_relations_equal +import pytest +from dbt.tests.util import check_relations_equal, rm_file, run_dbt, write_file from tests.functional.adapter.simple_copy.fixtures import ( - _PROPERTIES__SCHEMA_YML, - _SEEDS__SEED_INITIAL, - _SEEDS__SEED_UPDATE, _MODELS__ADVANCED_INCREMENTAL, _MODELS__COMPOUND_SORT, _MODELS__DISABLED, @@ -20,6 +16,9 @@ _MODELS__INTERLEAVED_SORT, _MODELS__MATERIALIZED, _MODELS__VIEW_MODEL, + _PROPERTIES__SCHEMA_YML, + _SEEDS__SEED_INITIAL, + _SEEDS__SEED_UPDATE, ) diff --git a/tests/functional/adapter/simple_seed/test_seed.py b/tests/functional/adapter/simple_seed/test_seed.py index d01f5fe3159..536ed7ad017 100644 --- a/tests/functional/adapter/simple_seed/test_seed.py +++ b/tests/functional/adapter/simple_seed/test_seed.py @@ -1,36 +1,34 @@ import csv -import pytest - from codecs import BOM_UTF8 from pathlib import Path +import pytest + from dbt.tests.util import ( + check_relations_equal, + check_table_does_exist, + check_table_does_not_exist, copy_file, mkdir, + read_file, rm_dir, run_dbt, - read_file, - check_relations_equal, - check_table_does_exist, - check_table_does_not_exist, ) - from 
tests.functional.adapter.simple_seed.fixtures import ( models__downstream_from_seed_actual, - models__from_basic_seed, models__downstream_from_seed_pipe_separated, + models__from_basic_seed, ) - from tests.functional.adapter.simple_seed.seeds import ( seed__actual_csv, - seeds__expected_sql, - seeds__enabled_in_config_csv, - seeds__disabled_in_config_csv, - seeds__tricky_csv, - seeds__wont_parse_csv, seed__unicode_csv, seed__with_dots_csv, + seeds__disabled_in_config_csv, + seeds__enabled_in_config_csv, + seeds__expected_sql, seeds__pipe_separated_csv, + seeds__tricky_csv, + seeds__wont_parse_csv, ) diff --git a/tests/functional/adapter/simple_seed/test_seed_type_override.py b/tests/functional/adapter/simple_seed/test_seed_type_override.py index 438e0bf5047..d4142f860bb 100644 --- a/tests/functional/adapter/simple_seed/test_seed_type_override.py +++ b/tests/functional/adapter/simple_seed/test_seed_type_override.py @@ -1,15 +1,13 @@ import pytest from dbt.tests.util import run_dbt - from tests.functional.adapter.simple_seed.fixtures import ( macros__schema_test, properties__schema_yml, ) - from tests.functional.adapter.simple_seed.seeds import ( - seeds__enabled_in_config_csv, seeds__disabled_in_config_csv, + seeds__enabled_in_config_csv, seeds__tricky_csv, ) diff --git a/tests/functional/adapter/simple_snapshot/common.py b/tests/functional/adapter/simple_snapshot/common.py index 726cc292e02..8eafeb90cb4 100644 --- a/tests/functional/adapter/simple_snapshot/common.py +++ b/tests/functional/adapter/simple_snapshot/common.py @@ -1,7 +1,7 @@ from typing import Dict, List -from dbt.tests.util import relation_from_name from dbt.tests.fixtures.project import TestProjInfo +from dbt.tests.util import relation_from_name def get_records( diff --git a/tests/functional/adapter/simple_snapshot/test_snapshot.py b/tests/functional/adapter/simple_snapshot/test_snapshot.py index e230c395ce1..05f08bb3928 100644 --- a/tests/functional/adapter/simple_snapshot/test_snapshot.py +++ 
b/tests/functional/adapter/simple_snapshot/test_snapshot.py @@ -1,10 +1,9 @@ +from typing import Dict, Iterable, List + import pytest -from typing import Dict, List, Iterable from dbt.tests.util import run_dbt - -from tests.functional.adapter.simple_snapshot import common -from tests.functional.adapter.simple_snapshot import seeds, snapshots +from tests.functional.adapter.simple_snapshot import common, seeds, snapshots MODEL_FACT_SQL = """ {{ config(materialized="table") }} diff --git a/tests/functional/adapter/store_test_failures_tests/basic.py b/tests/functional/adapter/store_test_failures_tests/basic.py index 3f5b8a29a53..88196d1f308 100644 --- a/tests/functional/adapter/store_test_failures_tests/basic.py +++ b/tests/functional/adapter/store_test_failures_tests/basic.py @@ -4,11 +4,9 @@ import pytest from dbt.artifacts.schemas.results import TestStatus -from dbt.tests.util import run_dbt, check_relation_types - +from dbt.tests.util import check_relation_types, run_dbt from tests.functional.adapter.store_test_failures_tests import _files - TestResult = namedtuple("TestResult", ["name", "status", "type"]) diff --git a/tests/functional/adapter/store_test_failures_tests/test_store_test_failures.py b/tests/functional/adapter/store_test_failures_tests/test_store_test_failures.py index edf85105a67..40546cea9e9 100644 --- a/tests/functional/adapter/store_test_failures_tests/test_store_test_failures.py +++ b/tests/functional/adapter/store_test_failures_tests/test_store_test_failures.py @@ -1,20 +1,16 @@ import pytest -from dbt.tests.util import ( - check_relations_equal, - run_dbt, -) - +from dbt.tests.util import check_relations_equal, run_dbt from tests.functional.adapter.store_test_failures_tests.fixtures import ( - seeds__people, + models__file_model_but_with_a_no_good_very_long_name, + models__fine_model, + models__problematic_model, + properties__schema_yml, seeds__expected_accepted_values, seeds__expected_failing_test, 
seeds__expected_not_null_problematic_model_id, seeds__expected_unique_problematic_model_id, - properties__schema_yml, - models__problematic_model, - models__fine_model, - models__file_model_but_with_a_no_good_very_long_name, + seeds__people, tests__failing_test, tests__passing_test, ) diff --git a/tests/functional/adapter/utils/base_array_utils.py b/tests/functional/adapter/utils/base_array_utils.py index 4c75a8650cb..3abc2ffce64 100644 --- a/tests/functional/adapter/utils/base_array_utils.py +++ b/tests/functional/adapter/utils/base_array_utils.py @@ -1,5 +1,5 @@ +from dbt.tests.util import check_relations_equal, get_relation_columns, run_dbt from tests.functional.adapter.utils.base_utils import BaseUtils -from dbt.tests.util import run_dbt, check_relations_equal, get_relation_columns class BaseArrayUtils(BaseUtils): diff --git a/tests/functional/adapter/utils/base_utils.py b/tests/functional/adapter/utils/base_utils.py index 622b4ab4224..75672e70090 100644 --- a/tests/functional/adapter/utils/base_utils.py +++ b/tests/functional/adapter/utils/base_utils.py @@ -1,4 +1,5 @@ import pytest + from dbt.tests.util import run_dbt macros__equals_sql = """ diff --git a/tests/functional/adapter/utils/data_types/base_data_type_macro.py b/tests/functional/adapter/utils/data_types/base_data_type_macro.py index 07eb07d8911..b5fe690ff56 100644 --- a/tests/functional/adapter/utils/data_types/base_data_type_macro.py +++ b/tests/functional/adapter/utils/data_types/base_data_type_macro.py @@ -1,4 +1,4 @@ -from dbt.tests.util import run_dbt, check_relations_equal, get_relation_columns +from dbt.tests.util import check_relations_equal, get_relation_columns, run_dbt class BaseDataTypeMacro: diff --git a/tests/functional/adapter/utils/data_types/test_type_bigint.py b/tests/functional/adapter/utils/data_types/test_type_bigint.py index 7b6d31882ba..946f8b888c8 100644 --- a/tests/functional/adapter/utils/data_types/test_type_bigint.py +++ 
b/tests/functional/adapter/utils/data_types/test_type_bigint.py @@ -1,5 +1,8 @@ import pytest -from tests.functional.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro + +from tests.functional.adapter.utils.data_types.base_data_type_macro import ( + BaseDataTypeMacro, +) models__expected_sql = """ select 9223372036854775800 as bigint_col diff --git a/tests/functional/adapter/utils/data_types/test_type_boolean.py b/tests/functional/adapter/utils/data_types/test_type_boolean.py index 14ef64b1a15..db2fcd5459a 100644 --- a/tests/functional/adapter/utils/data_types/test_type_boolean.py +++ b/tests/functional/adapter/utils/data_types/test_type_boolean.py @@ -1,5 +1,8 @@ import pytest -from tests.functional.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro + +from tests.functional.adapter.utils.data_types.base_data_type_macro import ( + BaseDataTypeMacro, +) seeds__expected_csv = """boolean_col True diff --git a/tests/functional/adapter/utils/data_types/test_type_float.py b/tests/functional/adapter/utils/data_types/test_type_float.py index 8f3077dbd1b..e3c75b6e7b8 100644 --- a/tests/functional/adapter/utils/data_types/test_type_float.py +++ b/tests/functional/adapter/utils/data_types/test_type_float.py @@ -1,5 +1,8 @@ import pytest -from tests.functional.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro + +from tests.functional.adapter.utils.data_types.base_data_type_macro import ( + BaseDataTypeMacro, +) seeds__expected_csv = """float_col 1.2345 diff --git a/tests/functional/adapter/utils/data_types/test_type_int.py b/tests/functional/adapter/utils/data_types/test_type_int.py index cc93e6f0f4c..9836730da95 100644 --- a/tests/functional/adapter/utils/data_types/test_type_int.py +++ b/tests/functional/adapter/utils/data_types/test_type_int.py @@ -1,5 +1,8 @@ import pytest -from tests.functional.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro + +from 
tests.functional.adapter.utils.data_types.base_data_type_macro import ( + BaseDataTypeMacro, +) seeds__expected_csv = """int_col 12345678 diff --git a/tests/functional/adapter/utils/data_types/test_type_numeric.py b/tests/functional/adapter/utils/data_types/test_type_numeric.py index 0ba4d88b1fe..fd8fef883fc 100644 --- a/tests/functional/adapter/utils/data_types/test_type_numeric.py +++ b/tests/functional/adapter/utils/data_types/test_type_numeric.py @@ -1,5 +1,8 @@ import pytest -from tests.functional.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro + +from tests.functional.adapter.utils.data_types.base_data_type_macro import ( + BaseDataTypeMacro, +) seeds__expected_csv = """numeric_col 1.2345 diff --git a/tests/functional/adapter/utils/data_types/test_type_string.py b/tests/functional/adapter/utils/data_types/test_type_string.py index 14486e8e871..1b39a038a1f 100644 --- a/tests/functional/adapter/utils/data_types/test_type_string.py +++ b/tests/functional/adapter/utils/data_types/test_type_string.py @@ -1,5 +1,8 @@ import pytest -from tests.functional.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro + +from tests.functional.adapter.utils.data_types.base_data_type_macro import ( + BaseDataTypeMacro, +) seeds__expected_csv = """string_col "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum." 
diff --git a/tests/functional/adapter/utils/data_types/test_type_timestamp.py b/tests/functional/adapter/utils/data_types/test_type_timestamp.py index 030e664914e..d8ebef3fbd7 100644 --- a/tests/functional/adapter/utils/data_types/test_type_timestamp.py +++ b/tests/functional/adapter/utils/data_types/test_type_timestamp.py @@ -1,5 +1,8 @@ import pytest -from tests.functional.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro + +from tests.functional.adapter.utils.data_types.base_data_type_macro import ( + BaseDataTypeMacro, +) seeds__expected_csv = """timestamp_col 2021-01-01 01:01:01 diff --git a/tests/functional/adapter/utils/test_any_value.py b/tests/functional/adapter/utils/test_any_value.py index 4360c537e73..9db412683e6 100644 --- a/tests/functional/adapter/utils/test_any_value.py +++ b/tests/functional/adapter/utils/test_any_value.py @@ -1,10 +1,11 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_any_value import ( - seeds__data_any_value_csv, - seeds__data_any_value_expected_csv, models__test_any_value_sql, models__test_any_value_yml, + seeds__data_any_value_csv, + seeds__data_any_value_expected_csv, ) diff --git a/tests/functional/adapter/utils/test_array_append.py b/tests/functional/adapter/utils/test_array_append.py index 52d9bd590f2..c7f518b99e0 100644 --- a/tests/functional/adapter/utils/test_array_append.py +++ b/tests/functional/adapter/utils/test_array_append.py @@ -1,4 +1,5 @@ import pytest + from tests.functional.adapter.utils.base_array_utils import BaseArrayUtils from tests.functional.adapter.utils.fixture_array_append import ( models__array_append_actual_sql, diff --git a/tests/functional/adapter/utils/test_array_concat.py b/tests/functional/adapter/utils/test_array_concat.py index 61a0652c220..535223e36f1 100644 --- a/tests/functional/adapter/utils/test_array_concat.py +++ b/tests/functional/adapter/utils/test_array_concat.py @@ -1,4 +1,5 @@ 
import pytest + from tests.functional.adapter.utils.base_array_utils import BaseArrayUtils from tests.functional.adapter.utils.fixture_array_concat import ( models__array_concat_actual_sql, diff --git a/tests/functional/adapter/utils/test_array_construct.py b/tests/functional/adapter/utils/test_array_construct.py index 6543b733449..8184d0f1a30 100644 --- a/tests/functional/adapter/utils/test_array_construct.py +++ b/tests/functional/adapter/utils/test_array_construct.py @@ -1,4 +1,5 @@ import pytest + from tests.functional.adapter.utils.base_array_utils import BaseArrayUtils from tests.functional.adapter.utils.fixture_array_construct import ( models__array_construct_actual_sql, diff --git a/tests/functional/adapter/utils/test_bool_or.py b/tests/functional/adapter/utils/test_bool_or.py index e360ca56a56..70d87dbc9b1 100644 --- a/tests/functional/adapter/utils/test_bool_or.py +++ b/tests/functional/adapter/utils/test_bool_or.py @@ -1,10 +1,11 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_bool_or import ( - seeds__data_bool_or_csv, - seeds__data_bool_or_expected_csv, models__test_bool_or_sql, models__test_bool_or_yml, + seeds__data_bool_or_csv, + seeds__data_bool_or_expected_csv, ) diff --git a/tests/functional/adapter/utils/test_cast_bool_to_text.py b/tests/functional/adapter/utils/test_cast_bool_to_text.py index 728caf1ce89..c9a9b8a89b6 100644 --- a/tests/functional/adapter/utils/test_cast_bool_to_text.py +++ b/tests/functional/adapter/utils/test_cast_bool_to_text.py @@ -1,4 +1,5 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_cast_bool_to_text import ( models__test_cast_bool_to_text_sql, diff --git a/tests/functional/adapter/utils/test_concat.py b/tests/functional/adapter/utils/test_concat.py index 766ef1790e2..7d9a09adc5d 100644 --- a/tests/functional/adapter/utils/test_concat.py +++ 
b/tests/functional/adapter/utils/test_concat.py @@ -1,9 +1,10 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_concat import ( - seeds__data_concat_csv, models__test_concat_sql, models__test_concat_yml, + seeds__data_concat_csv, ) diff --git a/tests/functional/adapter/utils/test_current_timestamp.py b/tests/functional/adapter/utils/test_current_timestamp.py index 2a071c4ba2a..de97c71024c 100644 --- a/tests/functional/adapter/utils/test_current_timestamp.py +++ b/tests/functional/adapter/utils/test_current_timestamp.py @@ -1,12 +1,8 @@ -import pytest - -from datetime import datetime -from datetime import timezone -from datetime import timedelta +from datetime import datetime, timedelta, timezone -from dbt.tests.util import run_dbt -from dbt.tests.util import relation_from_name +import pytest +from dbt.tests.util import relation_from_name, run_dbt models__current_ts_sql = """ select {{ dbt.current_timestamp() }} as current_ts_column diff --git a/tests/functional/adapter/utils/test_date_spine.py b/tests/functional/adapter/utils/test_date_spine.py index 0b4b3b95279..0c6545cdba0 100644 --- a/tests/functional/adapter/utils/test_date_spine.py +++ b/tests/functional/adapter/utils/test_date_spine.py @@ -1,4 +1,5 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_date_spine import ( models__test_date_spine_sql, diff --git a/tests/functional/adapter/utils/test_date_trunc.py b/tests/functional/adapter/utils/test_date_trunc.py index 977a2845efb..7c781c1fbd3 100644 --- a/tests/functional/adapter/utils/test_date_trunc.py +++ b/tests/functional/adapter/utils/test_date_trunc.py @@ -1,9 +1,10 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_date_trunc import ( - seeds__data_date_trunc_csv, models__test_date_trunc_sql, models__test_date_trunc_yml, + 
seeds__data_date_trunc_csv, ) diff --git a/tests/functional/adapter/utils/test_dateadd.py b/tests/functional/adapter/utils/test_dateadd.py index 01171bff3fd..dcb8e78e38c 100644 --- a/tests/functional/adapter/utils/test_dateadd.py +++ b/tests/functional/adapter/utils/test_dateadd.py @@ -1,9 +1,10 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_dateadd import ( - seeds__data_dateadd_csv, models__test_dateadd_sql, models__test_dateadd_yml, + seeds__data_dateadd_csv, ) diff --git a/tests/functional/adapter/utils/test_datediff.py b/tests/functional/adapter/utils/test_datediff.py index 291e5788997..df21dd5a361 100644 --- a/tests/functional/adapter/utils/test_datediff.py +++ b/tests/functional/adapter/utils/test_datediff.py @@ -1,9 +1,10 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_datediff import ( - seeds__data_datediff_csv, models__test_datediff_sql, models__test_datediff_yml, + seeds__data_datediff_csv, ) diff --git a/tests/functional/adapter/utils/test_equals.py b/tests/functional/adapter/utils/test_equals.py index 4fdf5e1eae8..46a8c9ec462 100644 --- a/tests/functional/adapter/utils/test_equals.py +++ b/tests/functional/adapter/utils/test_equals.py @@ -1,11 +1,12 @@ import pytest + +from dbt.tests.util import relation_from_name, run_dbt from tests.functional.adapter.utils.base_utils import macros__equals_sql from tests.functional.adapter.utils.fixture_equals import ( - SEEDS__DATA_EQUALS_CSV, MODELS__EQUAL_VALUES_SQL, MODELS__NOT_EQUAL_VALUES_SQL, + SEEDS__DATA_EQUALS_CSV, ) -from dbt.tests.util import run_dbt, relation_from_name class BaseEquals: diff --git a/tests/functional/adapter/utils/test_escape_single_quotes.py b/tests/functional/adapter/utils/test_escape_single_quotes.py index 5847b4cca59..a9f2715130d 100644 --- a/tests/functional/adapter/utils/test_escape_single_quotes.py +++ 
b/tests/functional/adapter/utils/test_escape_single_quotes.py @@ -1,8 +1,9 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_escape_single_quotes import ( - models__test_escape_single_quotes_quote_sql, models__test_escape_single_quotes_backslash_sql, + models__test_escape_single_quotes_quote_sql, models__test_escape_single_quotes_yml, ) diff --git a/tests/functional/adapter/utils/test_except.py b/tests/functional/adapter/utils/test_except.py index b34b7f5569c..bb73a3aa181 100644 --- a/tests/functional/adapter/utils/test_except.py +++ b/tests/functional/adapter/utils/test_except.py @@ -1,18 +1,19 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal + +from dbt.tests.util import check_relations_equal, run_dbt from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_except import ( - seeds__data_except_a_csv, - seeds__data_except_b_csv, - seeds__data_except_a_minus_b_csv, - seeds__data_except_b_minus_a_csv, models__data_except_empty_sql, - models__test_except_a_minus_b_sql, - models__test_except_b_minus_a_sql, models__test_except_a_minus_a_sql, + models__test_except_a_minus_b_sql, models__test_except_a_minus_empty_sql, + models__test_except_b_minus_a_sql, models__test_except_empty_minus_a_sql, models__test_except_empty_minus_empty_sql, + seeds__data_except_a_csv, + seeds__data_except_a_minus_b_csv, + seeds__data_except_b_csv, + seeds__data_except_b_minus_a_csv, ) diff --git a/tests/functional/adapter/utils/test_generate_series.py b/tests/functional/adapter/utils/test_generate_series.py index 402adb4964a..19252b40c81 100644 --- a/tests/functional/adapter/utils/test_generate_series.py +++ b/tests/functional/adapter/utils/test_generate_series.py @@ -1,4 +1,5 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_generate_series import ( 
models__test_generate_series_sql, diff --git a/tests/functional/adapter/utils/test_get_intervals_between.py b/tests/functional/adapter/utils/test_get_intervals_between.py index b21087ed795..f73b9ba4885 100644 --- a/tests/functional/adapter/utils/test_get_intervals_between.py +++ b/tests/functional/adapter/utils/test_get_intervals_between.py @@ -1,4 +1,5 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_get_intervals_between import ( models__test_get_intervals_between_sql, diff --git a/tests/functional/adapter/utils/test_get_powers_of_two.py b/tests/functional/adapter/utils/test_get_powers_of_two.py index 26842140a12..dd1922a2f4c 100644 --- a/tests/functional/adapter/utils/test_get_powers_of_two.py +++ b/tests/functional/adapter/utils/test_get_powers_of_two.py @@ -1,4 +1,5 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_get_powers_of_two import ( models__test_get_powers_of_two_sql, diff --git a/tests/functional/adapter/utils/test_hash.py b/tests/functional/adapter/utils/test_hash.py index dc5a5c0e075..4237ec14d8a 100644 --- a/tests/functional/adapter/utils/test_hash.py +++ b/tests/functional/adapter/utils/test_hash.py @@ -1,9 +1,10 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_hash import ( - seeds__data_hash_csv, models__test_hash_sql, models__test_hash_yml, + seeds__data_hash_csv, ) diff --git a/tests/functional/adapter/utils/test_intersect.py b/tests/functional/adapter/utils/test_intersect.py index 2d6221b654d..a7911afeb93 100644 --- a/tests/functional/adapter/utils/test_intersect.py +++ b/tests/functional/adapter/utils/test_intersect.py @@ -1,17 +1,18 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal + +from dbt.tests.util import check_relations_equal, run_dbt from 
tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_intersect import ( - seeds__data_intersect_a_csv, - seeds__data_intersect_b_csv, - seeds__data_intersect_a_overlap_b_csv, models__data_intersect_empty_sql, - models__test_intersect_a_overlap_b_sql, - models__test_intersect_b_overlap_a_sql, models__test_intersect_a_overlap_a_sql, + models__test_intersect_a_overlap_b_sql, models__test_intersect_a_overlap_empty_sql, + models__test_intersect_b_overlap_a_sql, models__test_intersect_empty_overlap_a_sql, models__test_intersect_empty_overlap_empty_sql, + seeds__data_intersect_a_csv, + seeds__data_intersect_a_overlap_b_csv, + seeds__data_intersect_b_csv, ) diff --git a/tests/functional/adapter/utils/test_last_day.py b/tests/functional/adapter/utils/test_last_day.py index 030b9b95802..4b05d869738 100644 --- a/tests/functional/adapter/utils/test_last_day.py +++ b/tests/functional/adapter/utils/test_last_day.py @@ -1,9 +1,10 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_last_day import ( - seeds__data_last_day_csv, models__test_last_day_sql, models__test_last_day_yml, + seeds__data_last_day_csv, ) diff --git a/tests/functional/adapter/utils/test_length.py b/tests/functional/adapter/utils/test_length.py index 24f93c1edbe..ede2b0821a9 100644 --- a/tests/functional/adapter/utils/test_length.py +++ b/tests/functional/adapter/utils/test_length.py @@ -1,9 +1,10 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_length import ( - seeds__data_length_csv, models__test_length_sql, models__test_length_yml, + seeds__data_length_csv, ) diff --git a/tests/functional/adapter/utils/test_listagg.py b/tests/functional/adapter/utils/test_listagg.py index e1e8076ac36..e838a369ebb 100644 --- a/tests/functional/adapter/utils/test_listagg.py +++ b/tests/functional/adapter/utils/test_listagg.py 
@@ -1,10 +1,11 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_listagg import ( - seeds__data_listagg_csv, - seeds__data_listagg_output_csv, models__test_listagg_sql, models__test_listagg_yml, + seeds__data_listagg_csv, + seeds__data_listagg_output_csv, ) diff --git a/tests/functional/adapter/utils/test_null_compare.py b/tests/functional/adapter/utils/test_null_compare.py index f1c1a83cbcc..161b6bb0110 100644 --- a/tests/functional/adapter/utils/test_null_compare.py +++ b/tests/functional/adapter/utils/test_null_compare.py @@ -1,5 +1,6 @@ import pytest +from dbt.tests.util import run_dbt from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_null_compare import ( MODELS__TEST_MIXED_NULL_COMPARE_SQL, @@ -7,7 +8,6 @@ MODELS__TEST_NULL_COMPARE_SQL, MODELS__TEST_NULL_COMPARE_YML, ) -from dbt.tests.util import run_dbt class BaseMixedNullCompare(BaseUtils): diff --git a/tests/functional/adapter/utils/test_position.py b/tests/functional/adapter/utils/test_position.py index 314048d82d5..4558ed17be5 100644 --- a/tests/functional/adapter/utils/test_position.py +++ b/tests/functional/adapter/utils/test_position.py @@ -1,9 +1,10 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_position import ( - seeds__data_position_csv, models__test_position_sql, models__test_position_yml, + seeds__data_position_csv, ) diff --git a/tests/functional/adapter/utils/test_replace.py b/tests/functional/adapter/utils/test_replace.py index b94f9ada4de..7e7d558064c 100644 --- a/tests/functional/adapter/utils/test_replace.py +++ b/tests/functional/adapter/utils/test_replace.py @@ -1,9 +1,10 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_replace import ( - seeds__data_replace_csv, models__test_replace_sql, 
models__test_replace_yml, + seeds__data_replace_csv, ) diff --git a/tests/functional/adapter/utils/test_right.py b/tests/functional/adapter/utils/test_right.py index aef52a94615..05fbad4ddf5 100644 --- a/tests/functional/adapter/utils/test_right.py +++ b/tests/functional/adapter/utils/test_right.py @@ -1,9 +1,10 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_right import ( - seeds__data_right_csv, models__test_right_sql, models__test_right_yml, + seeds__data_right_csv, ) diff --git a/tests/functional/adapter/utils/test_safe_cast.py b/tests/functional/adapter/utils/test_safe_cast.py index 07d82867f2c..1052cec209d 100644 --- a/tests/functional/adapter/utils/test_safe_cast.py +++ b/tests/functional/adapter/utils/test_safe_cast.py @@ -1,9 +1,10 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_safe_cast import ( - seeds__data_safe_cast_csv, models__test_safe_cast_sql, models__test_safe_cast_yml, + seeds__data_safe_cast_csv, ) diff --git a/tests/functional/adapter/utils/test_split_part.py b/tests/functional/adapter/utils/test_split_part.py index e8e076c43a5..0fee3db618f 100644 --- a/tests/functional/adapter/utils/test_split_part.py +++ b/tests/functional/adapter/utils/test_split_part.py @@ -1,9 +1,10 @@ import pytest + from tests.functional.adapter.utils.base_utils import BaseUtils from tests.functional.adapter.utils.fixture_split_part import ( - seeds__data_split_part_csv, models__test_split_part_sql, models__test_split_part_yml, + seeds__data_split_part_csv, ) diff --git a/tests/functional/adapter/utils/test_string_literal.py b/tests/functional/adapter/utils/test_string_literal.py index ab0d9ea83a8..f83a8f43438 100644 --- a/tests/functional/adapter/utils/test_string_literal.py +++ b/tests/functional/adapter/utils/test_string_literal.py @@ -1,4 +1,5 @@ import pytest + from tests.functional.adapter.utils.base_utils 
import BaseUtils from tests.functional.adapter.utils.fixture_string_literal import ( models__test_string_literal_sql, diff --git a/tests/functional/adapter/utils/test_timestamps.py b/tests/functional/adapter/utils/test_timestamps.py index 3fb3b2cd13d..adc5477373e 100644 --- a/tests/functional/adapter/utils/test_timestamps.py +++ b/tests/functional/adapter/utils/test_timestamps.py @@ -1,5 +1,7 @@ -import pytest import re + +import pytest + from dbt.tests.util import check_relation_has_expected_schema, run_dbt _MODEL_CURRENT_TIMESTAMP = """ diff --git a/tests/functional/adapter/utils/test_validate_sql.py b/tests/functional/adapter/utils/test_validate_sql.py index 5e03da33979..4c125d57721 100644 --- a/tests/functional/adapter/utils/test_validate_sql.py +++ b/tests/functional/adapter/utils/test_validate_sql.py @@ -3,8 +3,8 @@ import pytest from dbt.adapters.base.impl import BaseAdapter -from dbt.exceptions import DbtRuntimeError from dbt.adapters.exceptions import InvalidConnectionError +from dbt.exceptions import DbtRuntimeError class BaseValidateSqlMethod: diff --git a/tests/functional/analysis/test_analyses.py b/tests/functional/analysis/test_analyses.py index 61f66bc1dbc..e061039117f 100644 --- a/tests/functional/analysis/test_analyses.py +++ b/tests/functional/analysis/test_analyses.py @@ -1,8 +1,8 @@ import os -import pytest -from dbt.tests.util import run_dbt, get_manifest +import pytest +from dbt.tests.util import get_manifest, run_dbt my_model_sql = """ select 1 as id diff --git a/tests/functional/artifacts/data/state/v12/manifest.json b/tests/functional/artifacts/data/state/v12/manifest.json index d0ec334873c..25d83c3e796 100644 --- a/tests/functional/artifacts/data/state/v12/manifest.json +++ b/tests/functional/artifacts/data/state/v12/manifest.json @@ -1 +1 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v12.json", "dbt_version": "1.8.0a1", "generated_at": "2023-12-13T17:51:37.252335Z", "invocation_id": 
"ea31128b-c8be-4ccf-806a-112748d83b11", "env": {}, "project_name": "test", "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "my_model", "resource_type": "model", "package_name": "test", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "unique_id": "model.test.my_model", "fqn": ["test", "my_model"], "alias": "my_model", "checksum": {"name": "sha256", "checksum": "3ea0f972fa1b56aa2dc2f56ee784b6a5796312f9a813d59ae70fd8855f10d16d"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Example model", "columns": {"id": {"name": "id", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1702489893.4497569, "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"my_model\"", "raw_code": "select 1 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": 
null}, "model.test.metricflow_time_spine": {"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "metricflow_time_spine", "resource_type": "model", "package_name": "test", "path": "metricflow_time_spine.sql", "original_file_path": "models/metricflow_time_spine.sql", "unique_id": "model.test.metricflow_time_spine", "fqn": ["test", "metricflow_time_spine"], "alias": "metricflow_time_spine", "checksum": {"name": "sha256", "checksum": "954d9b349821edb5558a373119a7d91eeac9e620aaa96cd112c0d14bab729fdb"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1702489893.062557, "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"metricflow_time_spine\"", "raw_code": "SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "snapshot.test.snapshot_seed": {"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": 
"snapshot_seed.sql", "original_file_path": "snapshots/snapshot_seed.sql", "unique_id": "snapshot.test.snapshot_seed", "fqn": ["test", "snapshot_seed", "snapshot_seed"], "alias": "snapshot_seed", "checksum": {"name": "sha256", "checksum": "5fc998f39655f8fe52443a919e749b6e23883ef90202b040412baac13c6bfe18"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "strategy": "check", "target_schema": "test17024898921033785545_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17024898921033785545_test_previous_version_state"}, "created_at": 1702489893.131624, "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.test.my_seed"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "analysis.test.a": {"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "a", 
"resource_type": "analysis", "package_name": "test", "path": "analysis/a.sql", "original_file_path": "analyses/a.sql", "unique_id": "analysis.test.a", "fqn": ["test", "analysis", "a"], "alias": "a", "checksum": {"name": "sha256", "checksum": "a389c282f569f0bbdc2a8a4f174dea746c28582fdaf2048d31d9226af9feab23"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1702489893.215913, "relation_name": null, "raw_code": "select 4 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "test.test.just_my": {"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state_dbt_test__audit", "name": "just_my", "resource_type": "test", "package_name": "test", "path": "just_my.sql", "original_file_path": "tests/just_my.sql", "unique_id": "test.test.just_my", "fqn": ["test", "just_my"], "alias": "just_my", "checksum": {"name": "sha256", "checksum": "744889a2e2d9ce380619265e1217d7ccf6e6ca896c048d42ebe0f9cfb74d7156"}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["data_test_tag"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, 
"limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["data_test_tag"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["data_test_tag"]}, "created_at": 1702489893.278812, "relation_name": null, "raw_code": "{{ config(tags = ['data_test_tag']) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "seed.test.my_seed": {"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "my_seed", "resource_type": "seed", "package_name": "test", "path": "my_seed.csv", "original_file_path": "seeds/my_seed.csv", "unique_id": "seed.test.my_seed", "fqn": ["test", "my_seed"], "alias": "my_seed", "checksum": {"name": "sha256", "checksum": "f7ede83f36165ac6b7a047aa2c3f212dff385bfa9f35f395108cd06fc8e96943"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": null}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1702489893.396907, "relation_name": 
"\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"my_seed\"", "raw_code": "", "root_path": "/private/var/folders/1j/l_jt_2w16t5dnplmd2n0cr880000gq/T/pytest-of-gerda/pytest-106/project0", "depends_on": {"macros": []}}, "test.test.not_null_my_model_id.43e0e9183a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "id", "model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test17024898921033785545_test_previous_version_state_dbt_test__audit", "name": "not_null_my_model_id", "resource_type": "test", "package_name": "test", "path": "not_null_my_model_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.not_null_my_model_id.43e0e9183a", "fqn": ["test", "not_null_my_model_id"], "alias": "not_null_my_model_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1702489893.496192, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "id", "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}, "test.test.check_nothing_my_model_.d5a5e66110": {"test_metadata": {"name": "check_nothing", 
"kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test17024898921033785545_test_previous_version_state_dbt_test__audit", "name": "check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.check_nothing_my_model_.d5a5e66110", "fqn": ["test", "check_nothing_my_model_"], "alias": "check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1702489893.4976692, "relation_name": null, "raw_code": "{{ test_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_check_nothing", "macro.dbt.get_where_subquery"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}}, "sources": {"source.test.my_source.my_table": {"database": "dbt", "schema": "my_source", "name": "my_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.my_table", "fqn": ["test", "my_source", "my_table"], "source_name": "my_source", "source_description": "My 
source", "loader": "a_loader", "identifier": "my_seed", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "My table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"dbt\".\"my_source\".\"my_seed\"", "created_at": 1702489893.6685581}}, "macros": {"macro.test.test_check_nothing": {"name": "test_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/dummy_test.sql", "original_file_path": "macros/dummy_test.sql", "unique_id": "macro.test.test_check_nothing", "macro_sql": "{% test check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.459133, "supported_languages": null}, "macro.test.test_disabled_check_nothing": {"name": "test_disabled_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/disabled_dummy_test.sql", "original_file_path": "macros/disabled_dummy_test.sql", "unique_id": "macro.test.test_disabled_check_nothing", "macro_sql": "{% test disabled_check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\n{{ config(enabled=False) }}\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.459455, "supported_languages": null}, "macro.test.do_nothing": {"name": "do_nothing", "resource_type": "macro", 
"package_name": "test", "path": "macros/do_nothing.sql", "original_file_path": "macros/do_nothing.sql", "unique_id": "macro.test.do_nothing", "macro_sql": "{% macro do_nothing(foo2, bar2) %}\n select\n '{{ foo2 }}' as foo2,\n '{{ bar2 }}' as bar2\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.45973, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.460128, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": "postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4604428, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": 
"macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.460591, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.460742, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4608908, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog_relations": {"name": "postgres__get_catalog_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": 
"macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog_relations", "macro_sql": "{% macro postgres__get_catalog_relations(information_schema, relations) -%}\n {%- call statement('catalog', fetch_result=True) -%}\n\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n when 'm' then 'MATERIALIZED VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n where (\n {%- for relation in relations -%}\n {%- if relation.identifier -%}\n (upper(sch.nspname) = upper('{{ relation.schema }}') and\n upper(tbl.relname) = upper('{{ relation.identifier }}'))\n {%- else-%}\n upper(sch.nspname) = upper('{{ relation.schema }}')\n {%- endif -%}\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. 
Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p', 'm') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table, [m]aterialized view. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.462921, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n {%- set relations = [] -%}\n {%- for schema in schemas -%}\n {%- set dummy = relations.append({'schema': schema}) -%}\n {%- endfor -%}\n {{ return(postgres__get_catalog_relations(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.463424, "supported_languages": null}, "macro.dbt_postgres.postgres__get_relations": {"name": "postgres__get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres__get_relations", "macro_sql": "{% macro postgres__get_relations() -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a 
pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v', 'm')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, 
dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4643211, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations() %}\n {{ return(postgres__get_relations()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.464503, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {% endif -%}\n {% if contract_config.enforced and (not temporary) -%}\n {{ get_table_columns_and_constraints() }} ;\n insert into {{ relation }} (\n {{ adapter.dispatch('get_column_names', 'dbt')() }}\n )\n {%- set sql = get_select_subquery(sql) %}\n {% else 
%}\n as\n {% endif %}\n (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.default__get_column_names", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4763231, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.477042, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ 
relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4774752, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.477914, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ 
return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.478566, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n matviewname as name,\n schemaname as schema,\n 'materialized_view' as type\n from pg_matviews\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.479178, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": 
"macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.479415, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4798899, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall 
%}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4811032, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). 
Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4822998, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.482568, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "arguments": [], "created_at": 1702489892.483017, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.483407, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.484009, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": 
"macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.484319, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.48519, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": 
"macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4854872, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.485644, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_indexes_sql": {"name": "postgres__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_indexes_sql", "macro_sql": "{% macro postgres__get_show_indexes_sql(relation) %}\n select\n i.relname as name,\n m.amname as method,\n ix.indisunique as \"unique\",\n array_to_string(array_agg(a.attname), ',') as column_names\n from pg_index ix\n join pg_class i\n on i.oid = ix.indexrelid\n join pg_am m\n on m.oid=i.relam\n join pg_class t\n on t.oid = ix.indrelid\n join pg_namespace n\n on n.oid = t.relnamespace\n join pg_attribute a\n on a.attrelid = t.oid\n and a.attnum = ANY(ix.indkey)\n where t.relname = 
'{{ relation.identifier }}'\n and n.nspname = '{{ relation.schema }}'\n and t.relkind in ('r', 'm')\n group by 1, 2, 3\n order by 1, 2, 3\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.485892, "supported_languages": null}, "macro.dbt_postgres.postgres__get_drop_index_sql": {"name": "postgres__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_drop_index_sql", "macro_sql": "\n\n\n{%- macro postgres__get_drop_index_sql(relation, index_name) -%}\n drop index if exists \"{{ relation.schema }}\".\"{{ index_name }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.486088, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.486612, "supported_languages": null}, 
"macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.487571, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_materialized_view": {"name": "postgres__drop_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_materialized_view", "macro_sql": "{% macro postgres__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.487763, 
"supported_languages": null}, "macro.dbt_postgres.postgres__describe_materialized_view": {"name": "postgres__describe_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/describe.sql", "original_file_path": "macros/relations/materialized_view/describe.sql", "unique_id": "macro.dbt_postgres.postgres__describe_materialized_view", "macro_sql": "{% macro postgres__describe_materialized_view(relation) %}\n -- for now just get the indexes, we don't need the name or the query yet\n {% set _indexes = run_query(get_show_indexes_sql(relation)) %}\n {% do return({'indexes': _indexes}) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4881668, "supported_languages": null}, "macro.dbt_postgres.postgres__refresh_materialized_view": {"name": "postgres__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt_postgres.postgres__refresh_materialized_view", "macro_sql": "{% macro postgres__refresh_materialized_view(relation) %}\n refresh materialized view {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.488352, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_materialized_view_sql": {"name": "postgres__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": 
"macro.dbt_postgres.postgres__get_rename_materialized_view_sql", "macro_sql": "{% macro postgres__get_rename_materialized_view_sql(relation, new_name) %}\n alter materialized view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.488592, "supported_languages": null}, "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql": {"name": "postgres__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n\n -- apply a full refresh immediately if needed\n {% if configuration_changes.requires_full_refresh %}\n\n {{ get_replace_sql(existing_relation, relation, sql) }}\n\n -- otherwise apply individual changes as needed\n {% else %}\n\n {{ postgres__update_indexes_on_materialized_view(relation, configuration_changes.indexes) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_sql", "macro.dbt_postgres.postgres__update_indexes_on_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.489763, "supported_languages": null}, "macro.dbt_postgres.postgres__update_indexes_on_materialized_view": {"name": "postgres__update_indexes_on_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", 
"unique_id": "macro.dbt_postgres.postgres__update_indexes_on_materialized_view", "macro_sql": "\n\n\n{%- macro postgres__update_indexes_on_materialized_view(relation, index_changes) -%}\n {{- log(\"Applying UPDATE INDEXES to: \" ~ relation) -}}\n\n {%- for _index_change in index_changes -%}\n {%- set _index = _index_change.context -%}\n\n {%- if _index_change.action == \"drop\" -%}\n\n {{ postgres__get_drop_index_sql(relation, _index.name) }};\n\n {%- elif _index_change.action == \"create\" -%}\n\n {{ postgres__get_create_index_sql(relation, _index.as_node_config) }}\n\n {%- endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql", "macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4905322, "supported_languages": null}, "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes": {"name": "postgres__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes", "macro_sql": "{% macro postgres__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {% set _existing_materialized_view = postgres__describe_materialized_view(existing_relation) %}\n {% set _configuration_changes = existing_relation.get_materialized_view_config_change_collection(_existing_materialized_view, new_config) %}\n {% do return(_configuration_changes) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__describe_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1702489892.490937, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql": {"name": "postgres__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_create_materialized_view_as_sql(relation, sql) %}\n create materialized view if not exists {{ relation }} as {{ sql }};\n\n {% for _index_dict in config.get('indexes', []) -%}\n {{- get_create_index_sql(relation, _index_dict) -}}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4914439, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_table": {"name": "postgres__drop_table", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_table", "macro_sql": "{% macro postgres__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.491634, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_table_sql": {"name": "postgres__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_table_sql", "macro_sql": "{% macro postgres__get_replace_table_sql(relation, 
sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4926, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_table_sql": {"name": "postgres__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_table_sql", "macro_sql": "{% macro postgres__get_rename_table_sql(relation, new_name) %}\n alter table {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4928472, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_view": {"name": "postgres__drop_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_view", "macro_sql": "{% macro postgres__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1702489892.4930282, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_view_sql": {"name": "postgres__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_view_sql", "macro_sql": "{% macro postgres__get_replace_view_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4938009, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_view_sql": {"name": "postgres__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_view_sql", "macro_sql": "{% macro postgres__get_rename_view_sql(relation, new_name) %}\n alter view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4940412, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"name": "postgres__dateadd", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_postgres.postgres__dateadd", "macro_sql": 
"{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4943411, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"name": "postgres__listagg", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_postgres.postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4951968, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"name": "postgres__datediff", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_postgres.postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif 
datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1702489892.4995358, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"name": "postgres__any_value", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_postgres.postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.499748, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"name": "postgres__last_day", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt_postgres.postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.500465, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"name": "postgres__split_part", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_postgres.postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, 
delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.501039, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5025449, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.502815, 
"supported_languages": null}, "macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.503018, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.503212, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.503405, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": 
"macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.503903, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.504405, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.504831, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.505419, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.505802, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.510586, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.510832, 
"supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5111418, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. 
We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5121439, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.512374, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro 
default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.512612, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n {#-- N.B. 
The whitespace below is necessary to avoid edge case issue with comments --#}\n {#-- See: https://github.com/dbt-labs/dbt-core/issues/6781 --#}\n select {{ check_cols_config | join(', ') }} from (\n {{ node['compiled_code'] }}\n ) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5145621, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = 
snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.516487, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.521842, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.522252, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5224829, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.522608, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.522808, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.522967, "supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.523251, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if 
strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.524491, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.524776, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ 
strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.525128, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5257301, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) 
-%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 
'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.534119, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- 
materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% set store_failures_as = config.get('store_failures_as') %}\n -- if `--store-failures` is invoked via command line and `store_failures_as` is not set,\n -- config.get('store_failures_as', 'table') returns None, not 'table'\n {% if store_failures_as == none %}{% set store_failures_as = 'table' %}{% endif %}\n {% if store_failures_as not in ['table', 'view'] %}\n {{ exceptions.raise_compiler_error(\n \"'\" ~ store_failures_as ~ \"' is not a valid value for `store_failures_as`. \"\n \"Accepted values are: ['ephemeral', 'table', 'view']\"\n ) }}\n {% endif %}\n\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type=store_failures_as) -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ get_create_sql(target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.get_create_sql", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1702489892.537909, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5393128, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.539742, "supported_languages": null}, "macro.dbt.get_unit_test_sql": {"name": "get_unit_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_unit_test_sql", "macro_sql": "{% macro get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%}\n {{ 
adapter.dispatch('get_unit_test_sql', 'dbt')(main_sql, expected_fixture_sql, expected_column_names) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_unit_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5400288, "supported_languages": null}, "macro.dbt.default__get_unit_test_sql": {"name": "default__get_unit_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_unit_test_sql", "macro_sql": "{% macro default__get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%}\n-- Build actual result given inputs\nwith dbt_internal_unit_test_actual AS (\n select\n {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%},{% endif %}{%- endfor -%}, {{ dbt.string_literal(\"actual\") }} as actual_or_expected\n from (\n {{ main_sql }}\n ) _dbt_internal_unit_test_actual\n),\n-- Build expected result\ndbt_internal_unit_test_expected AS (\n select\n {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%}, {% endif %}{%- endfor -%}, {{ dbt.string_literal(\"expected\") }} as actual_or_expected\n from (\n {{ expected_fixture_sql }}\n ) _dbt_internal_unit_test_expected\n)\n-- Union actual and expected results\nselect * from dbt_internal_unit_test_actual\nunion all\nselect * from dbt_internal_unit_test_expected\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5407722, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.541282, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.541804, "supported_languages": null}, "macro.dbt.materialization_unit_default": {"name": "materialization_unit_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/unit.sql", "original_file_path": "macros/materializations/tests/unit.sql", "unique_id": "macro.dbt.materialization_unit_default", "macro_sql": "{%- materialization unit, default -%}\n\n {% set relations = [] %}\n\n {% set expected_rows = config.get('expected_rows') %}\n {% set tested_expected_column_names = expected_rows[0].keys() if (expected_rows | length ) > 0 else get_columns_in_query(sql) %} 
%}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {% do run_query(get_create_table_as_sql(True, temp_relation, get_empty_subquery_sql(sql))) %}\n {%- set columns_in_relation = adapter.get_columns_in_relation(temp_relation) -%}\n {%- set column_name_to_data_types = {} -%}\n {%- for column in columns_in_relation -%}\n {%- do column_name_to_data_types.update({column.name: column.dtype}) -%}\n {%- endfor -%}\n\n {% set unit_test_sql = get_unit_test_sql(sql, get_expected_sql(expected_rows, column_name_to_data_types), tested_expected_column_names) %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ unit_test_sql }}\n\n {%- endcall %}\n\n {% do adapter.drop_relation(temp_relation) %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query", "macro.dbt.make_temp_relation", "macro.dbt.run_query", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_empty_subquery_sql", "macro.dbt.get_unit_test_sql", "macro.dbt.get_expected_sql", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.543893, "supported_languages": ["sql"]}, "macro.dbt.materialization_materialized_view_default": {"name": "materialization_materialized_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialization_materialized_view_default", "macro_sql": "{% materialization materialized_view, default %}\n {% set existing_relation = load_cached_relation(this) %}\n {% set target_relation = this.incorporate(type=this.MaterializedView) %}\n {% set intermediate_relation = make_intermediate_relation(target_relation) %}\n {% set backup_relation_type = 
target_relation.MaterializedView if existing_relation is none else existing_relation.type %}\n {% set backup_relation = make_backup_relation(target_relation, backup_relation_type) %}\n\n {{ materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) }}\n\n {% set build_sql = materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% if build_sql == '' %}\n {{ materialized_view_execute_no_op(target_relation) }}\n {% else %}\n {{ materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) }}\n {% endif %}\n\n {{ materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.materialized_view_setup", "macro.dbt.materialized_view_get_build_sql", "macro.dbt.materialized_view_execute_no_op", "macro.dbt.materialized_view_execute_build_sql", "macro.dbt.materialized_view_teardown"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.550401, "supported_languages": ["sql"]}, "macro.dbt.materialized_view_setup": {"name": "materialized_view_setup", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_setup", "macro_sql": "{% macro materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) %}\n\n -- backup_relation and intermediate_relation should not already exist in the database\n -- it's possible these exist because of a previous run that exited unexpectedly\n {% set preexisting_backup_relation = load_cached_relation(backup_relation) %}\n {% set 
preexisting_intermediate_relation = load_cached_relation(intermediate_relation) %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5509398, "supported_languages": null}, "macro.dbt.materialized_view_teardown": {"name": "materialized_view_teardown", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_teardown", "macro_sql": "{% macro materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) %}\n\n -- drop the temp relations if they exist to leave the database clean for the next run\n {{ drop_relation_if_exists(backup_relation) }}\n {{ drop_relation_if_exists(intermediate_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.551308, "supported_languages": null}, "macro.dbt.materialized_view_get_build_sql": {"name": "materialized_view_get_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_get_build_sql", "macro_sql": "{% macro 
materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% set full_refresh_mode = should_full_refresh() %}\n\n -- determine the scenario we're in: create, full_refresh, alter, refresh data\n {% if existing_relation is none %}\n {% set build_sql = get_create_materialized_view_as_sql(target_relation, sql) %}\n {% elif full_refresh_mode or not existing_relation.is_materialized_view %}\n {% set build_sql = get_replace_sql(existing_relation, target_relation, sql) %}\n {% else %}\n\n -- get config options\n {% set on_configuration_change = config.get('on_configuration_change') %}\n {% set configuration_changes = get_materialized_view_configuration_changes(existing_relation, config) %}\n\n {% if configuration_changes is none %}\n {% set build_sql = refresh_materialized_view(target_relation) %}\n\n {% elif on_configuration_change == 'apply' %}\n {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% elif on_configuration_change == 'continue' %}\n {% set build_sql = '' %}\n {{ exceptions.warn(\"Configuration changes were identified and `on_configuration_change` was set to `continue` for `\" ~ target_relation ~ \"`\") }}\n {% elif on_configuration_change == 'fail' %}\n {{ exceptions.raise_fail_fast_error(\"Configuration changes were identified and `on_configuration_change` was set to `fail` for `\" ~ target_relation ~ \"`\") }}\n\n {% else %}\n -- this only happens if the user provides a value other than `apply`, 'skip', 'fail'\n {{ exceptions.raise_compiler_error(\"Unexpected configuration scenario\") }}\n\n {% endif %}\n\n {% endif %}\n\n {% do return(build_sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.get_create_materialized_view_as_sql", "macro.dbt.get_replace_sql", "macro.dbt.get_materialized_view_configuration_changes", 
"macro.dbt.refresh_materialized_view", "macro.dbt.get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.55323, "supported_languages": null}, "macro.dbt.materialized_view_execute_no_op": {"name": "materialized_view_execute_no_op", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_no_op", "macro_sql": "{% macro materialized_view_execute_no_op(target_relation) %}\n {% do store_raw_result(\n name=\"main\",\n message=\"skip \" ~ target_relation,\n code=\"skip\",\n rows_affected=\"-1\"\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.553552, "supported_languages": null}, "macro.dbt.materialized_view_execute_build_sql": {"name": "materialized_view_execute_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_build_sql", "macro_sql": "{% macro materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) %}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set grant_config = config.get('grants') %}\n\n {% call statement(name=\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ 
adapter.commit() }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.554471, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view.sql", "original_file_path": "macros/materializations/models/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. 
In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. 
*/\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.558391, "supported_languages": ["sql"]}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table.sql", "original_file_path": "macros/materializations/models/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. 
Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. 
*/\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.56234, "supported_languages": ["sql"]}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ 
return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.564464, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.565242, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': 
sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.566174, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5665019, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. 
Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5675159, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5758579, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro 
default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.578167, "supported_languages": null}, 
"macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5785348, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for 
predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.579946, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.58032, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider 
including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.581223, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.58209, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": 
"dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.583271, "supported_languages": null}, "macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5836082, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": 
{"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5838752, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.584299, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.584571, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": 
"default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5850089, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5852711, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": 
"macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.585648, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.585918, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5861251, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5865128, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = 
(should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. 
Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": 
["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5934541, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' 
% (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6008031, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New 
column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.602528, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types 
changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6041899, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) 
%}\n\n {% endif %}\n\n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6053782, "supported_languages": null}, "macro.dbt.can_clone_table": {"name": "can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.can_clone_table", "macro_sql": "{% macro can_clone_table() %}\n {{ return(adapter.dispatch('can_clone_table', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__can_clone_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.605706, "supported_languages": null}, "macro.dbt.default__can_clone_table": {"name": "default__can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.default__can_clone_table", "macro_sql": "{% macro default__can_clone_table() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.605867, "supported_languages": null}, "macro.dbt.create_or_replace_clone": {"name": "create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": 
"macro.dbt.create_or_replace_clone", "macro_sql": "{% macro create_or_replace_clone(this_relation, defer_relation) %}\n {{ return(adapter.dispatch('create_or_replace_clone', 'dbt')(this_relation, defer_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_or_replace_clone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.606281, "supported_languages": null}, "macro.dbt.default__create_or_replace_clone": {"name": "default__create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.default__create_or_replace_clone", "macro_sql": "{% macro default__create_or_replace_clone(this_relation, defer_relation) %}\n create or replace table {{ this_relation }} clone {{ defer_relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.606463, "supported_languages": null}, "macro.dbt.materialization_clone_default": {"name": "materialization_clone_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/clone.sql", "original_file_path": "macros/materializations/models/clone/clone.sql", "unique_id": "macro.dbt.materialization_clone_default", "macro_sql": "{%- materialization clone, default -%}\n\n {%- set relations = {'relations': []} -%}\n\n {%- if not defer_relation -%}\n -- nothing to do\n {{ log(\"No relation found in state manifest for \" ~ model.unique_id, info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n\n {%- if existing_relation and not flags.FULL_REFRESH -%}\n -- noop!\n {{ log(\"Relation \" ~ 
existing_relation ~ \" already exists\", info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set other_existing_relation = load_cached_relation(defer_relation) -%}\n\n -- If this is a database that can do zero-copy cloning of tables, and the other relation is a table, then this will be a table\n -- Otherwise, this will be a view\n\n {% set can_clone_table = can_clone_table() %}\n\n {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {% if existing_relation is not none and not existing_relation.is_table %}\n {{ log(\"Dropping relation \" ~ existing_relation ~ \" because it is of type \" ~ existing_relation.type) }}\n {{ drop_relation_if_exists(existing_relation) }}\n {% endif %}\n\n -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace'\n {% call statement('main') %}\n {% if target_relation and defer_relation and target_relation == defer_relation %}\n {{ log(\"Target relation and defer relation are the same, skipping clone for relation: \" ~ target_relation) }}\n {% else %}\n {{ create_or_replace_clone(target_relation, defer_relation) }}\n {% endif %}\n\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n {%- else -%}\n\n {%- set target_relation = this.incorporate(type='view') -%}\n\n -- reuse the view materialization\n -- TODO: support actual dispatch for materialization macros\n -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799\n {% set search_name = \"materialization_view_\" ~ adapter.type() %}\n {% if not search_name in context %}\n {% set search_name = \"materialization_view_default\" %}\n {% endif %}\n {% set materialization_macro = 
context[search_name] %}\n {% set relations = materialization_macro() %}\n {{ return(relations) }}\n\n {%- endif -%}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.can_clone_table", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_or_replace_clone", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.611434, "supported_languages": ["sql"]}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, 
old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.615864, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- 
endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6225102, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.623789, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, 
agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6241221, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.624792, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.625057, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": 
"dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.625251, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6254442, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.625603, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": 
"macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6258318, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6259909, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.626668, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": 
"load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.62693, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", 
"macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.628712, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.629285, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name -%}\n\n {{ custom_alias_name | trim }}\n\n {%- elif node.version -%}\n\n {{ return(node.name ~ \"_v\" ~ (node.version | replace(\".\", \"_\"))) }}\n\n {%- else -%}\n\n {{ node.name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.629807, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", 
"original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.63053, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6308942, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- 
endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.631285, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.631825, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": "default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6321821, "supported_languages": null}, "macro.dbt.get_drop_sql": {"name": "get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", 
"unique_id": "macro.dbt.get_drop_sql", "macro_sql": "{%- macro get_drop_sql(relation) -%}\n {{- log('Applying DROP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.633137, "supported_languages": null}, "macro.dbt.default__get_drop_sql": {"name": "default__get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__get_drop_sql", "macro_sql": "{%- macro default__get_drop_sql(relation) -%}\n\n {%- if relation.is_view -%}\n {{ drop_view(relation) }}\n\n {%- elif relation.is_table -%}\n {{ drop_table(relation) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ drop_materialized_view(relation) }}\n\n {%- else -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.drop_view", "macro.dbt.drop_table", "macro.dbt.drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6336472, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.633909, "supported_languages": 
null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n {{ get_drop_sql(relation) }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.63418, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.634453, "supported_languages": null}, "macro.dbt.get_replace_sql": {"name": "get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.get_replace_sql", "macro_sql": "{% macro get_replace_sql(existing_relation, target_relation, sql) %}\n {{- log('Applying REPLACE to: ' ~ existing_relation) -}}\n {{- adapter.dispatch('get_replace_sql', 'dbt')(existing_relation, target_relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": 
[], "created_at": 1702489892.6355321, "supported_languages": null}, "macro.dbt.default__get_replace_sql": {"name": "default__get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.default__get_replace_sql", "macro_sql": "{% macro default__get_replace_sql(existing_relation, target_relation, sql) %}\n\n {# /* use a create or replace statement if possible */ #}\n\n {% set is_replaceable = existing_relation.type == target_relation_type and existing_relation.can_be_replaced %}\n\n {% if is_replaceable and existing_relation.is_view %}\n {{ get_replace_view_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_table %}\n {{ get_replace_table_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_materialized_view %}\n {{ get_replace_materialized_view_sql(target_relation, sql) }}\n\n {# /* a create or replace statement is not possible, so try to stage and/or backup to be safe */ #}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one using a backup */ #}\n {%- elif target_relation.can_be_renamed and existing_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one without using a backup */ #}\n {%- elif target_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_drop_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }}\n\n {# /* create target_relation in place by first backing up the existing relation */ #}\n {%- elif existing_relation.can_be_renamed -%}\n {{ get_create_backup_sql(existing_relation) 
}};\n {{ get_create_sql(target_relation, sql) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* no renaming is allowed, so just drop and create */ #}\n {%- else -%}\n {{ get_drop_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_view_sql", "macro.dbt.get_replace_table_sql", "macro.dbt.get_replace_materialized_view_sql", "macro.dbt.get_create_intermediate_sql", "macro.dbt.get_create_backup_sql", "macro.dbt.get_rename_intermediate_sql", "macro.dbt.get_drop_backup_sql", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6373062, "supported_languages": null}, "macro.dbt.get_create_intermediate_sql": {"name": "get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.get_create_intermediate_sql", "macro_sql": "{%- macro get_create_intermediate_sql(relation, sql) -%}\n {{- log('Applying CREATE INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_intermediate_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6378188, "supported_languages": null}, "macro.dbt.default__get_create_intermediate_sql": {"name": "default__get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.default__get_create_intermediate_sql", "macro_sql": "{%- macro 
default__get_create_intermediate_sql(relation, sql) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n -- drop any pre-existing intermediate\n {{ get_drop_sql(intermediate_relation) }};\n\n {{ get_create_sql(intermediate_relation, sql) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.638164, "supported_languages": null}, "macro.dbt.drop_schema_named": {"name": "drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.drop_schema_named", "macro_sql": "{% macro drop_schema_named(schema_name) %}\n {{ return(adapter.dispatch('drop_schema_named', 'dbt') (schema_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_schema_named"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.63853, "supported_languages": null}, "macro.dbt.default__drop_schema_named": {"name": "default__drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.default__drop_schema_named", "macro_sql": "{% macro default__drop_schema_named(schema_name) %}\n {% set schema_relation = api.Relation.create(schema=schema_name) %}\n {{ adapter.drop_schema(schema_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.638835, "supported_languages": null}, "macro.dbt.get_drop_backup_sql": {"name": 
"get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.get_drop_backup_sql", "macro_sql": "{%- macro get_drop_backup_sql(relation) -%}\n {{- log('Applying DROP BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.639267, "supported_languages": null}, "macro.dbt.default__get_drop_backup_sql": {"name": "default__get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.default__get_drop_backup_sql", "macro_sql": "{%- macro default__get_drop_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n {{ get_drop_sql(backup_relation) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.639553, "supported_languages": null}, "macro.dbt.get_rename_sql": {"name": "get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.get_rename_sql", "macro_sql": "{%- macro get_rename_sql(relation, new_name) -%}\n {{- log('Applying RENAME to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_sql', 'dbt')(relation, new_name) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_sql"]}, "description": "", 
"meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6404989, "supported_languages": null}, "macro.dbt.default__get_rename_sql": {"name": "default__get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__get_rename_sql", "macro_sql": "{%- macro default__get_rename_sql(relation, new_name) -%}\n\n {%- if relation.is_view -%}\n {{ get_rename_view_sql(relation, new_name) }}\n\n {%- elif relation.is_table -%}\n {{ get_rename_table_sql(relation, new_name) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_rename_materialized_view_sql(relation, new_name) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_rename_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.get_rename_view_sql", "macro.dbt.get_rename_table_sql", "macro.dbt.get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.641108, "supported_languages": null}, "macro.dbt.rename_relation": {"name": "rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6413922, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.641782, "supported_languages": null}, "macro.dbt.get_create_backup_sql": {"name": "get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.get_create_backup_sql", "macro_sql": "{%- macro get_create_backup_sql(relation) -%}\n {{- log('Applying CREATE BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6422558, "supported_languages": null}, "macro.dbt.default__get_create_backup_sql": {"name": "default__get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.default__get_create_backup_sql", "macro_sql": "{%- macro default__get_create_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n -- drop any pre-existing backup\n {{ 
get_drop_sql(backup_relation) }};\n\n {{ get_rename_sql(relation, backup_relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.642634, "supported_languages": null}, "macro.dbt.get_create_sql": {"name": "get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.get_create_sql", "macro_sql": "{%- macro get_create_sql(relation, sql) -%}\n {{- log('Applying CREATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.643284, "supported_languages": null}, "macro.dbt.default__get_create_sql": {"name": "default__get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.default__get_create_sql", "macro_sql": "{%- macro default__get_create_sql(relation, sql) -%}\n\n {%- if relation.is_view -%}\n {{ get_create_view_as_sql(relation, sql) }}\n\n {%- elif relation.is_table -%}\n {{ get_create_table_as_sql(False, relation, sql) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_create_materialized_view_as_sql(relation, sql) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_create_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.get_create_view_as_sql", "macro.dbt.get_create_table_as_sql", 
"macro.dbt.get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.643911, "supported_languages": null}, "macro.dbt.get_rename_intermediate_sql": {"name": "get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.get_rename_intermediate_sql", "macro_sql": "{%- macro get_rename_intermediate_sql(relation) -%}\n {{- log('Applying RENAME INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_intermediate_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6443572, "supported_languages": null}, "macro.dbt.default__get_rename_intermediate_sql": {"name": "default__get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.default__get_rename_intermediate_sql", "macro_sql": "{%- macro default__get_rename_intermediate_sql(relation) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n {{ get_rename_sql(intermediate_relation, relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.64466, "supported_languages": null}, "macro.dbt.drop_materialized_view": {"name": "drop_materialized_view", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.drop_materialized_view", "macro_sql": "{% macro drop_materialized_view(relation) -%}\n {{ return(adapter.dispatch('drop_materialized_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.645021, "supported_languages": null}, "macro.dbt.default__drop_materialized_view": {"name": "default__drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.default__drop_materialized_view", "macro_sql": "{% macro default__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.645167, "supported_languages": null}, "macro.dbt.get_replace_materialized_view_sql": {"name": "get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.get_replace_materialized_view_sql", "macro_sql": "{% macro get_replace_materialized_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_materialized_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1702489892.6455371, "supported_languages": null}, "macro.dbt.default__get_replace_materialized_view_sql": {"name": "default__get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.default__get_replace_materialized_view_sql", "macro_sql": "{% macro default__get_replace_materialized_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.645806, "supported_languages": null}, "macro.dbt.refresh_materialized_view": {"name": "refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.refresh_materialized_view", "macro_sql": "{% macro refresh_materialized_view(relation) %}\n {{- log('Applying REFRESH to: ' ~ relation) -}}\n {{- adapter.dispatch('refresh_materialized_view', 'dbt')(relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__refresh_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.646227, "supported_languages": null}, "macro.dbt.default__refresh_materialized_view": {"name": "default__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.default__refresh_materialized_view", "macro_sql": "{% macro 
default__refresh_materialized_view(relation) %}\n {{ exceptions.raise_compiler_error(\"`refresh_materialized_view` has not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.646407, "supported_languages": null}, "macro.dbt.get_rename_materialized_view_sql": {"name": "get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.get_rename_materialized_view_sql", "macro_sql": "{% macro get_rename_materialized_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_materialized_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.646778, "supported_languages": null}, "macro.dbt.default__get_rename_materialized_view_sql": {"name": "default__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.default__get_rename_materialized_view_sql", "macro_sql": "{% macro default__get_rename_materialized_view_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6469781, "supported_languages": null}, 
"macro.dbt.get_alter_materialized_view_as_sql": {"name": "get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_alter_materialized_view_as_sql", "macro_sql": "{% macro get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{- log('Applying ALTER to: ' ~ relation) -}}\n {{- adapter.dispatch('get_alter_materialized_view_as_sql', 'dbt')(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n ) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.64781, "supported_languages": null}, "macro.dbt.default__get_alter_materialized_view_as_sql": {"name": "default__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_alter_materialized_view_as_sql", "macro_sql": "{% macro default__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6480699, "supported_languages": null}, "macro.dbt.get_materialized_view_configuration_changes": {"name": 
"get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_materialized_view_configuration_changes", "macro_sql": "{% macro get_materialized_view_configuration_changes(existing_relation, new_config) %}\n /* {#\n It's recommended that configuration changes be formatted as follows:\n {\"\": [{\"action\": \"\", \"context\": ...}]}\n\n For example:\n {\n \"indexes\": [\n {\"action\": \"drop\", \"context\": \"index_abc\"},\n {\"action\": \"create\", \"context\": {\"columns\": [\"column_1\", \"column_2\"], \"type\": \"hash\", \"unique\": True}},\n ],\n }\n\n Either way, `get_materialized_view_configuration_changes` needs to align with `get_alter_materialized_view_as_sql`.\n #} */\n {{- log('Determining configuration changes on: ' ~ existing_relation) -}}\n {%- do return(adapter.dispatch('get_materialized_view_configuration_changes', 'dbt')(existing_relation, new_config)) -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_materialized_view_configuration_changes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.648458, "supported_languages": null}, "macro.dbt.default__get_materialized_view_configuration_changes": {"name": "default__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_materialized_view_configuration_changes", "macro_sql": "{% macro default__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", 
"depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6486568, "supported_languages": null}, "macro.dbt.get_create_materialized_view_as_sql": {"name": "get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.get_create_materialized_view_as_sql", "macro_sql": "{% macro get_create_materialized_view_as_sql(relation, sql) -%}\n {{- adapter.dispatch('get_create_materialized_view_as_sql', 'dbt')(relation, sql) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6490319, "supported_languages": null}, "macro.dbt.default__get_create_materialized_view_as_sql": {"name": "default__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.default__get_create_materialized_view_as_sql", "macro_sql": "{% macro default__get_create_materialized_view_as_sql(relation, sql) -%}\n {{ exceptions.raise_compiler_error(\n \"`get_create_materialized_view_as_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.649228, "supported_languages": null}, "macro.dbt.get_table_columns_and_constraints": {"name": "get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_table_columns_and_constraints", "macro_sql": "{%- macro get_table_columns_and_constraints() -%}\n {{ adapter.dispatch('get_table_columns_and_constraints', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.650592, "supported_languages": null}, "macro.dbt.default__get_table_columns_and_constraints": {"name": "default__get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_table_columns_and_constraints", "macro_sql": "{% macro default__get_table_columns_and_constraints() -%}\n {{ return(table_columns_and_constraints()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.650763, "supported_languages": null}, "macro.dbt.table_columns_and_constraints": {"name": "table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.table_columns_and_constraints", "macro_sql": "{% macro table_columns_and_constraints() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set raw_column_constraints = adapter.render_raw_columns_constraints(raw_columns=model['columns']) -%}\n {%- set raw_model_constraints = 
adapter.render_raw_model_constraints(raw_constraints=model['constraints']) -%}\n (\n {% for c in raw_column_constraints -%}\n {{ c }}{{ \",\" if not loop.last or raw_model_constraints }}\n {% endfor %}\n {% for c in raw_model_constraints -%}\n {{ c }}{{ \",\" if not loop.last }}\n {% endfor -%}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.65154, "supported_languages": null}, "macro.dbt.get_assert_columns_equivalent": {"name": "get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_assert_columns_equivalent", "macro_sql": "\n\n{%- macro get_assert_columns_equivalent(sql) -%}\n {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.651774, "supported_languages": null}, "macro.dbt.default__get_assert_columns_equivalent": {"name": "default__get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_assert_columns_equivalent", "macro_sql": "{% macro default__get_assert_columns_equivalent(sql) -%}\n {{ return(assert_columns_equivalent(sql)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.651969, "supported_languages": null}, 
"macro.dbt.assert_columns_equivalent": {"name": "assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.assert_columns_equivalent", "macro_sql": "{% macro assert_columns_equivalent(sql) %}\n\n {#-- First ensure the user has defined 'columns' in yaml specification --#}\n {%- set user_defined_columns = model['columns'] -%}\n {%- if not user_defined_columns -%}\n {{ exceptions.raise_contract_error([], []) }}\n {%- endif -%}\n\n {#-- Obtain the column schema provided by sql file. #}\n {%- set sql_file_provided_columns = get_column_schema_from_query(sql, config.get('sql_header', none)) -%}\n {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. #}\n {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(user_defined_columns)) -%}\n\n {#-- create dictionaries with name and formatted data type and strings for exception #}\n {%- set sql_columns = format_columns(sql_file_provided_columns) -%}\n {%- set yaml_columns = format_columns(schema_file_provided_columns) -%}\n\n {%- if sql_columns|length != yaml_columns|length -%}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n\n {%- for sql_col in sql_columns -%}\n {%- set yaml_col = [] -%}\n {%- for this_col in yaml_columns -%}\n {%- if this_col['name'] == sql_col['name'] -%}\n {%- do yaml_col.append(this_col) -%}\n {%- break -%}\n {%- endif -%}\n {%- endfor -%}\n {%- if not yaml_col -%}\n {#-- Column with name not found in yaml #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%}\n {#-- Column data types don't match #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- endfor 
-%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_column_schema_from_query", "macro.dbt.get_empty_schema_sql", "macro.dbt.format_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6539361, "supported_languages": null}, "macro.dbt.format_columns": {"name": "format_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.format_columns", "macro_sql": "{% macro format_columns(columns) %}\n {% set formatted_columns = [] %}\n {% for column in columns %}\n {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%}\n {%- do formatted_columns.append(formatted_column) -%}\n {% endfor %}\n {{ return(formatted_columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__format_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.654461, "supported_languages": null}, "macro.dbt.default__format_column": {"name": "default__format_column", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__format_column", "macro_sql": "{% macro default__format_column(column) -%}\n {% set data_type = column.dtype %}\n {% set formatted = column.column.lower() ~ \" \" ~ data_type %}\n {{ return({'name': column.name, 'data_type': data_type, 'formatted': formatted}) }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.654939, "supported_languages": null}, "macro.dbt.drop_table": {"name": "drop_table", "resource_type": 
"macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.drop_table", "macro_sql": "{% macro drop_table(relation) -%}\n {{ return(adapter.dispatch('drop_table', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.655298, "supported_languages": null}, "macro.dbt.default__drop_table": {"name": "default__drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.default__drop_table", "macro_sql": "{% macro default__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6554399, "supported_languages": null}, "macro.dbt.get_replace_table_sql": {"name": "get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.get_replace_table_sql", "macro_sql": "{% macro get_replace_table_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_table_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.655802, "supported_languages": null}, "macro.dbt.default__get_replace_table_sql": {"name": "default__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", 
"original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.default__get_replace_table_sql", "macro_sql": "{% macro default__get_replace_table_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.656011, "supported_languages": null}, "macro.dbt.get_rename_table_sql": {"name": "get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.get_rename_table_sql", "macro_sql": "{% macro get_rename_table_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_table_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.656379, "supported_languages": null}, "macro.dbt.default__get_rename_table_sql": {"name": "default__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.default__get_rename_table_sql", "macro_sql": "{% macro default__get_rename_table_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.656577, "supported_languages": null}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6577091, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.65796, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif 
%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.658557, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced and (not temporary) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.659515, "supported_languages": null}, "macro.dbt.default__get_column_names": {"name": "default__get_column_names", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_column_names", "macro_sql": "{% macro default__get_column_names() %}\n {#- loop through user_provided_columns to get column names -#}\n {%- set user_provided_columns = model['columns'] -%}\n {%- for i in user_provided_columns %}\n {%- set 
col = user_provided_columns[i] -%}\n {%- set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] -%}\n {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.660146, "supported_languages": null}, "macro.dbt.get_select_subquery": {"name": "get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_select_subquery", "macro_sql": "{% macro get_select_subquery(sql) %}\n {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6604002, "supported_languages": null}, "macro.dbt.default__get_select_subquery": {"name": "default__get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_select_subquery", "macro_sql": "{% macro default__get_select_subquery(sql) %}\n select {{ adapter.dispatch('get_column_names', 'dbt')() }}\n from (\n {{ sql }}\n ) as model_subq\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_column_names"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6606479, "supported_languages": null}, "macro.dbt.drop_view": {"name": "drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.drop_view", 
"macro_sql": "{% macro drop_view(relation) -%}\n {{ return(adapter.dispatch('drop_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.661003, "supported_languages": null}, "macro.dbt.default__drop_view": {"name": "default__drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.default__drop_view", "macro_sql": "{% macro default__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6611462, "supported_languages": null}, "macro.dbt.get_replace_view_sql": {"name": "get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.get_replace_view_sql", "macro_sql": "{% macro get_replace_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.662261, "supported_languages": null}, "macro.dbt.default__get_replace_view_sql": {"name": "default__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__get_replace_view_sql", "macro_sql": "{% macro default__get_replace_view_sql(relation, sql) %}\n 
{{ exceptions.raise_compiler_error(\n \"`get_replace_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.662462, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. 
This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.664104, "supported_languages": null}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6643698, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": 
"macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.664682, "supported_languages": null}, "macro.dbt.get_rename_view_sql": {"name": "get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.get_rename_view_sql", "macro_sql": "{% macro get_rename_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.66505, "supported_languages": null}, "macro.dbt.default__get_rename_view_sql": {"name": "default__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.default__get_rename_view_sql", "macro_sql": "{% macro default__get_rename_view_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.665251, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", 
"path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.665806, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.666029, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.666276, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.666858, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6673222, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", 
"macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.667722, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6680498, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor 
%}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6688168, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? 
--#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.670816, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6716192, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1702489892.6720269, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6744618, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n 
{% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.676194, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. 
Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.677233, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6775522, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.677862, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1702489892.677966, "supported_languages": null}, "macro.dbt.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.678944, "supported_languages": null}, "macro.dbt.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6797628, "supported_languages": null}, "macro.dbt.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", 
"unique_id": "macro.dbt.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.68008, "supported_languages": null}, "macro.dbt.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n {# call as follows:\n\n date_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n ) #}\n\n\n with rawdata as (\n\n {{dbt.generate_series(\n dbt.get_intervals_between(start_date, end_date, datepart)\n )}}\n\n ),\n\n all_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"row_number() over (order by 1) - 1\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n ),\n\n filtered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n )\n\n select * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.generate_series", "macro.dbt.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.680588, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, 
new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.681036, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.68126, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.68159, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.681758, "supported_languages": null}, "macro.dbt.get_powers_of_two": {"name": 
"get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6828642, "supported_languages": null}, "macro.dbt.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.683444, "supported_languages": null}, "macro.dbt.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1702489892.683711, "supported_languages": null}, "macro.dbt.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.684439, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.684801, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro 
default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6849449, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.685508, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.685786, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, 
"docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.686151, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.686363, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6867359, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.686934, "supported_languages": null}, 
"macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6873422, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.687535, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.688407, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.688978, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6894479, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "arguments": [], "created_at": 1702489892.6896799, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.690075, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.690288, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.690662, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.690897, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.691238, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.691461, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ 
return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.691802, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.691951, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6923468, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": 
null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6925418, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.692876, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.693096, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.694456, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.694669, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.694897, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6951098, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- 
endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.695392, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.695667, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6958919, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.696158, 
"supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.696384, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.696604, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.696844, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": 
"macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.697058, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6972768, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6974878, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6978838, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.698067, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.698406, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6985521, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.699027, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.699476, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.699679, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": 
"macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.700398, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.700626, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 + {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.700948, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.701324, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.70152, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.702045, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": 
"macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7023842, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7027788, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.702966, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro 
create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.703469, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.703722, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7039502, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop 
schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.704203, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.704849, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.70506, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- 
endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7052631, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.70541, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7056398, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% 
endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.705746, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.706052, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.706282, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": 
"macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.707399, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.707598, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.707818, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro 
default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7083972, "supported_languages": null}, "macro.dbt.get_drop_index_sql": {"name": "get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_drop_index_sql", "macro_sql": "{% macro get_drop_index_sql(relation, index_name) -%}\n {{ adapter.dispatch('get_drop_index_sql', 'dbt')(relation, index_name) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7086651, "supported_languages": null}, "macro.dbt.default__get_drop_index_sql": {"name": "default__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_drop_index_sql", "macro_sql": "{% macro default__get_drop_index_sql(relation, index_name) -%}\n {{ exceptions.raise_compiler_error(\"`get_drop_index_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.708859, "supported_languages": null}, "macro.dbt.get_show_indexes_sql": {"name": 
"get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_show_indexes_sql", "macro_sql": "{% macro get_show_indexes_sql(relation) -%}\n {{ adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.709079, "supported_languages": null}, "macro.dbt.default__get_show_indexes_sql": {"name": "default__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_show_indexes_sql", "macro_sql": "{% macro default__get_show_indexes_sql(relation) -%}\n {{ exceptions.raise_compiler_error(\"`get_show_indexes_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.70926, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.711892, 
"supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.712121, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.712429, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro 
%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.712831, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.713176, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.713624, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": 
"macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.713892, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.714118, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.714466, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", 
"path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.715341, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.715683, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "arguments": [], "created_at": 1702489892.715879, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.716444, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.717024, "supported_languages": null}, "macro.dbt.validate_sql": {"name": "validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.validate_sql", "macro_sql": "{% 
macro validate_sql(sql) -%}\n {{ return(adapter.dispatch('validate_sql', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__validate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7174232, "supported_languages": null}, "macro.dbt.default__validate_sql": {"name": "default__validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.default__validate_sql", "macro_sql": "{% macro default__validate_sql(sql) -%}\n {% call statement('validate_sql') -%}\n explain {{ sql }}\n {% endcall %}\n {{ return(load_result('validate_sql')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.717751, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.720056, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": 
{"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7202182, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7204502, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.720602, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) 
%}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.721077, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.721333, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7214751, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": 
"get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7217898, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7220511, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7223868, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": 
"default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.722655, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.722969, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or 
(if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7240279, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7242901, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant 
+ revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.724632, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7249548, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = 
adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. --#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.726592, "supported_languages": null}, "macro.dbt.get_show_sql": {"name": "get_show_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_show_sql", "macro_sql": "{% macro get_show_sql(compiled_code, sql_header, limit) -%}\n {%- if sql_header -%}\n {{ sql_header }}\n {%- endif -%}\n {%- if limit is not none -%}\n {{ get_limit_subquery_sql(compiled_code, limit) }}\n {%- else -%}\n {{ compiled_code }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_limit_subquery_sql"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.727339, "supported_languages": null}, "macro.dbt.get_limit_subquery_sql": {"name": "get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_limit_subquery_sql", "macro_sql": "{% macro get_limit_subquery_sql(sql, limit) %}\n {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.727602, "supported_languages": null}, "macro.dbt.default__get_limit_subquery_sql": {"name": "default__get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.default__get_limit_subquery_sql", "macro_sql": "{% macro default__get_limit_subquery_sql(sql, limit) %}\n select *\n from (\n {{ sql }}\n ) as model_limit_subq\n limit {{ limit }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.727795, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7286851, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7289228, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.729202, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ 
exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.729436, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7298071, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1702489892.730485, "supported_languages": null}, "macro.dbt.get_catalog_relations": {"name": "get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog_relations", "macro_sql": "{% macro get_catalog_relations(information_schema, relations) -%}\n {{ return(adapter.dispatch('get_catalog_relations', 'dbt')(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.734237, "supported_languages": null}, "macro.dbt.default__get_catalog_relations": {"name": "default__get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog_relations", "macro_sql": "{% macro default__get_catalog_relations(information_schema, relations) -%}\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog_relations not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.734608, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.734891, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.735255, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7355149, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- 
if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.735737, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7359931, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7364202, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", 
"macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7367098, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.737128, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1702489892.7373838, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.737608, "supported_languages": null}, "macro.dbt.get_relations": {"name": "get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relations", "macro_sql": "{% macro get_relations() %}\n {{ return(adapter.dispatch('get_relations', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7378361, "supported_languages": null}, "macro.dbt.default__get_relations": {"name": "default__get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relations", "macro_sql": "{% macro default__get_relations() %}\n {{ exceptions.raise_not_implemented(\n 'get_relations macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": 
[], "created_at": 1702489892.73805, "supported_languages": null}, "macro.dbt.get_relation_last_modified": {"name": "get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relation_last_modified", "macro_sql": "{% macro get_relation_last_modified(information_schema, relations) %}\n {{ return(adapter.dispatch('get_relation_last_modified', 'dbt')(information_schema, relations)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_relation_last_modified"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.738337, "supported_languages": null}, "macro.dbt.default__get_relation_last_modified": {"name": "default__get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relation_last_modified", "macro_sql": "{% macro default__get_relation_last_modified(information_schema, relations) %}\n {{ exceptions.raise_not_implemented(\n 'get_relation_last_modified macro not implemented for adapter ' + adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.738574, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": 
["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.741552, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.741784, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.742213, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% macro get_empty_subquery_sql(select_sql, select_sql_header=none) -%}\n 
{{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.742526, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %}\n {%- if select_sql_header is not none -%}\n {{ select_sql_header }}\n {%- endif -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.74281, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7430599, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": 
"macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n {%- set col_naked_numeric = [] -%}\n select\n {% for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {%- do col_err.append(col['name']) -%}\n {#-- If this column's type is just 'numeric' then it is missing precision/scale, raise a warning --#}\n {%- elif col['data_type'].strip().lower() in ('numeric', 'decimal', 'number') -%}\n {%- do col_naked_numeric.append(col['name']) -%}\n {%- endif -%}\n {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %}\n cast(null as {{ col['data_type'] }}) as {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- elif (col_naked_numeric | length) > 0 -%}\n {{ exceptions.warn(\"Detected columns with numeric type and unspecified precision/scale, this can lead to unintended rounding: \" ~ col_naked_numeric ~ \"`\") }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.744768, "supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %}\n {% set column_schema = 
adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.745255, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.745512, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.745993, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7463129, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. 
Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7471611, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.747524, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, 
add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.748637, "supported_languages": null}, "macro.dbt.get_fixture_sql": {"name": "get_fixture_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_fixture_sql", "macro_sql": "{% macro get_fixture_sql(rows, column_name_to_data_types) %}\n-- Fixture for {{ model.name }}\n{% set default_row = {} %}\n\n{%- if not column_name_to_data_types -%}\n{%- set columns_in_relation = adapter.get_columns_in_relation(this) -%}\n{%- set column_name_to_data_types = {} -%}\n{%- for column in columns_in_relation -%}\n{%- do column_name_to_data_types.update({column.name: column.dtype}) -%}\n{%- endfor -%}\n{%- endif -%}\n\n{%- if not column_name_to_data_types -%}\n {{ exceptions.raise_compiler_error(\"Not able to get columns for unit test '\" ~ model.name ~ \"' from relation \" ~ this) }}\n{%- endif -%}\n\n{%- for column_name, column_type in column_name_to_data_types.items() -%}\n {%- do default_row.update({column_name: (safe_cast(\"null\", column_type) | trim )}) -%}\n{%- endfor -%}\n\n{%- for row in rows -%}\n{%- do format_row(row, column_name_to_data_types) -%}\n{%- 
set default_row_copy = default_row.copy() -%}\n{%- do default_row_copy.update(row) -%}\nselect\n{%- for column_name, column_value in default_row_copy.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n\n{%- if (rows | length) == 0 -%}\n select\n {%- for column_name, column_value in default_row.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%},{%- endif %}\n {%- endfor %}\n limit 0\n{%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.safe_cast", "macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7529018, "supported_languages": null}, "macro.dbt.get_expected_sql": {"name": "get_expected_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_expected_sql", "macro_sql": "{% macro get_expected_sql(rows, column_name_to_data_types) %}\n\n{%- if (rows | length) == 0 -%}\n select * FROM dbt_internal_unit_test_actual\n limit 0\n{%- else -%}\n{%- for row in rows -%}\n{%- do format_row(row, column_name_to_data_types) -%}\nselect\n{%- for column_name, column_value in row.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n{%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.753653, "supported_languages": null}, "macro.dbt.format_row": {"name": "format_row", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": 
"macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.format_row", "macro_sql": "\n\n{%- macro format_row(row, column_name_to_data_types) -%}\n\n{#-- wrap yaml strings in quotes, apply cast --#}\n{%- for column_name, column_value in row.items() -%}\n{% set row_update = {column_name: column_value} %}\n{%- if column_value is string -%}\n{%- set row_update = {column_name: safe_cast(dbt.string_literal(column_value), column_name_to_data_types[column_name]) } -%}\n{%- elif column_value is none -%}\n{%- set row_update = {column_name: safe_cast('null', column_name_to_data_types[column_name]) } -%}\n{%- else -%}\n{%- set row_update = {column_name: safe_cast(column_value, column_name_to_data_types[column_name]) } -%}\n{%- endif -%}\n{%- do row.update(row_update) -%}\n{%- endfor -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.safe_cast", "macro.dbt.string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7547069, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.756869, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": "default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": 
"macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7570791, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %}\n {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%}\n {%- if _ref.get('version') -%}\n {% do _ref_args.extend([\"v\" ~ _ref['version']]) %}\n {%- endif -%}\n {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args, **kwargs):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n version = kwargs.get(\"v\") or kwargs.get(\"version\")\n if version:\n key += f\".v{version}\"\n dbt_load_df_function = kwargs.get(\"dbt_load_df_function\")\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.758251, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro 
build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7588322, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! 
#}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.75966, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", 
"macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7603211, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7604249, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.761129, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1702489892.761452, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7618601, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7623239, "supported_languages": null}}, "docs": {"doc.test.somedoc": {"name": "somedoc", "resource_type": "doc", "package_name": "test", "path": "somedoc.md", "original_file_path": "models/somedoc.md", "unique_id": "doc.test.somedoc", "block_contents": "Testing, testing"}, "doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for 
your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. 
For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {"exposure.test.simple_exposure": {"name": "simple_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.simple_exposure", "fqn": ["test", "simple_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "url": null, "depends_on": {"macros": [], "nodes": ["source.test.my_source.my_table", "model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [["my_source", "my_table"]], "metrics": [], "created_at": 1702489893.579107}}, "metrics": {"metric.test.blue_customers_post_2010": {"name": "blue_customers_post_2010", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.blue_customers_post_2010", "fqn": ["test", "blue_customers_post_2010"], "description": "", "label": "Blue Customers since 2010", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ 
Dimension('id__favorite_color') }} = 'blue'"}]}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": {"where_filters": [{"where_sql_template": "{{ TimeDimension('id__created_at', 'day') }} > '2010-01-01'"}]}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1702489893.629962, "group": null}, "metric.test.customers": {"name": "customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.customers", "fqn": ["test", "customers"], "description": "", "label": "Customers Metric", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1702489893.63043, "group": null}, "metric.test.ratio_of_blue_customers_to_red_customers": {"name": "ratio_of_blue_customers_to_red_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.ratio_of_blue_customers_to_red_customers", "fqn": ["test", 
"ratio_of_blue_customers_to_red_customers"], "description": "", "label": "Very Important Customer Color Ratio", "type": "ratio", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}, "denominator": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'red'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1702489893.632082, "group": null}, "metric.test.doubled_blue_customers": {"name": "doubled_blue_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.doubled_blue_customers", "fqn": ["test", "doubled_blue_customers"], "description": "", "label": "Inflated blue customer numbers", "type": "derived", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": "customers * 2", "window": null, "grain_to_date": null, "metrics": [{"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}]}, "alias": null, "offset_window": null, 
"offset_to_grain": null}], "conversion_type_params": null}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1702489893.633114, "group": null}}, "groups": {}, "selectors": {}, "disabled": {"model.test.disabled_model": [{"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "disabled_model", "resource_type": "model", "package_name": "test", "path": "disabled_model.sql", "original_file_path": "models/disabled_model.sql", "unique_id": "model.test.disabled_model", "fqn": ["test", "disabled_model"], "alias": "disabled_model", "checksum": {"name": "sha256", "checksum": "597106d23ce34e3cd2430588e5c1cf474ebdd138fc47e09b925a4ab258a27acc"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1702489893.059992, "config_call_dict": {"enabled": false}, "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"disabled_model\"", "raw_code": "{{ config(enabled=False) }}\nselect 2 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": 
{"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null, "defer_relation": null}], "snapshot.test.disabled_snapshot_seed": [{"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "disabled_snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "disabled_snapshot_seed.sql", "original_file_path": "snapshots/disabled_snapshot_seed.sql", "unique_id": "snapshot.test.disabled_snapshot_seed", "fqn": ["test", "disabled_snapshot_seed", "disabled_snapshot_seed"], "alias": "disabled_snapshot_seed", "checksum": {"name": "sha256", "checksum": "fe76c9dd437341c9e82a0f2a8baf3148f961b768eaa0a4410cd27d3c071bd617"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "strategy": "check", "target_schema": "test17024898921033785545_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17024898921033785545_test_previous_version_state", "enabled": false}, "created_at": 1702489893.159178, "config_call_dict": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17024898921033785545_test_previous_version_state", "enabled": false}, 
"relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"disabled_snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n enabled=False,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "defer_relation": null}], "analysis.test.disabled_al": [{"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "disabled_al", "resource_type": "analysis", "package_name": "test", "path": "analysis/disabled_al.sql", "original_file_path": "analyses/disabled_al.sql", "unique_id": "analysis.test.disabled_al", "fqn": ["test", "analysis", "disabled_al"], "alias": "disabled_al", "checksum": {"name": "sha256", "checksum": "32d36ad6cff0786eb562440ba60ef6c9b9a7f4c282dfb7a52eaf19d36370f0e1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1702489893.2125812, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\nselect 9 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], 
"depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.test.disabled_just_my": [{"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state_dbt_test__audit", "name": "disabled_just_my", "resource_type": "test", "package_name": "test", "path": "disabled_just_my.sql", "original_file_path": "tests/disabled_just_my.sql", "unique_id": "test.test.disabled_just_my", "fqn": ["test", "disabled_just_my"], "alias": "disabled_just_my", "checksum": {"name": "sha256", "checksum": "4f2268fd89a3b4ef899264ada6d7aa33603671cbc5d5acead7dc2eadf1add985"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1702489893.275594, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.test.disabled_check_nothing_my_model_.f2c6a72d37": [{"test_metadata": {"name": "disabled_check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test17024898921033785545_test_previous_version_state_dbt_test__audit", "name": "disabled_check_nothing_my_model_", "resource_type": 
"test", "package_name": "test", "path": "disabled_check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.disabled_check_nothing_my_model_.f2c6a72d37", "fqn": ["test", "disabled_check_nothing_my_model_"], "alias": "disabled_check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1702489893.503576, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ test_disabled_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_disabled_check_nothing", "macro.dbt.get_where_subquery"], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}], "exposure.test.disabled_exposure": [{"name": "disabled_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.disabled_exposure", "fqn": ["test", "disabled_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "url": null, "depends_on": 
{"macros": [], "nodes": []}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "created_at": 1702489893.5801542}], "metric.test.disabled_metric": [{"name": "disabled_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.disabled_metric", "fqn": ["test", "disabled_metric"], "description": "", "label": "Count records", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": false, "group": null}, "unrendered_config": {"enabled": false}, "sources": [], "depends_on": {"macros": [], "nodes": []}, "refs": [], "metrics": [], "created_at": 1702489893.631113, "group": null}], "seed.test.disabled_seed": [{"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "disabled_seed", "resource_type": "seed", "package_name": "test", "path": "disabled_seed.csv", "original_file_path": "seeds/disabled_seed.csv", "unique_id": "seed.test.disabled_seed", "fqn": ["test", "disabled_seed"], "alias": "disabled_seed", "checksum": {"name": "sha256", "checksum": "31fddd8ec40c6aba6a3a8e7d83fedea2fd0a56c47b64ea3df1847ec1b018e2d1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": 
{}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": null}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1702489893.520305, "config_call_dict": {}, "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"disabled_seed\"", "raw_code": "", "root_path": "/private/var/folders/1j/l_jt_2w16t5dnplmd2n0cr880000gq/T/pytest-of-gerda/pytest-106/project0", "depends_on": {"macros": []}, "defer_relation": null}], "source.test.my_source.disabled_table": [{"database": "dbt", "schema": "my_source", "name": "disabled_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.disabled_table", "fqn": ["test", "my_source", "disabled_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "disabled_table", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "Disabled table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": false}, "patch_path": null, "unrendered_config": {"enabled": false}, "relation_name": "\"dbt\".\"my_source\".\"disabled_table\"", "created_at": 1702489893.668709}]}, "parent_map": {"model.test.my_model": [], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": ["seed.test.my_seed"], "analysis.test.a": [], "test.test.just_my": ["model.test.my_model"], "seed.test.my_seed": [], "test.test.not_null_my_model_id.43e0e9183a": 
["model.test.my_model"], "test.test.check_nothing_my_model_.d5a5e66110": ["model.test.my_model"], "source.test.my_source.my_table": [], "exposure.test.simple_exposure": ["model.test.my_model", "source.test.my_source.my_table"], "metric.test.blue_customers_post_2010": ["semantic_model.test.semantic_people"], "metric.test.customers": ["semantic_model.test.semantic_people"], "metric.test.ratio_of_blue_customers_to_red_customers": ["metric.test.customers"], "metric.test.doubled_blue_customers": ["metric.test.customers"], "semantic_model.test.semantic_people": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["exposure.test.simple_exposure", "semantic_model.test.semantic_people", "test.test.check_nothing_my_model_.d5a5e66110", "test.test.just_my", "test.test.not_null_my_model_id.43e0e9183a"], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": [], "analysis.test.a": [], "test.test.just_my": [], "seed.test.my_seed": ["snapshot.test.snapshot_seed"], "test.test.not_null_my_model_id.43e0e9183a": [], "test.test.check_nothing_my_model_.d5a5e66110": [], "source.test.my_source.my_table": ["exposure.test.simple_exposure"], "exposure.test.simple_exposure": [], "metric.test.blue_customers_post_2010": [], "metric.test.customers": ["metric.test.doubled_blue_customers", "metric.test.ratio_of_blue_customers_to_red_customers"], "metric.test.ratio_of_blue_customers_to_red_customers": [], "metric.test.doubled_blue_customers": [], "semantic_model.test.semantic_people": ["metric.test.blue_customers_post_2010", "metric.test.customers"]}, "group_map": {}, "saved_queries": {}, "semantic_models": {"semantic_model.test.semantic_people": {"name": "semantic_people", "resource_type": "semantic_model", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "semantic_model.test.semantic_people", "fqn": ["test", "semantic_people"], "model": "ref('my_model')", "node_relation": {"alias": "my_model", "schema_name": 
"test17024898921033785545_test_previous_version_state", "database": "dbt", "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"my_model\""}, "description": null, "label": null, "defaults": {"agg_time_dimension": "created_at"}, "entities": [{"name": "id", "type": "primary", "description": null, "label": null, "role": null, "expr": null}], "measures": [{"name": "years_tenure", "agg": "sum", "description": null, "label": null, "create_metric": false, "expr": "tenure", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "people", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "customers", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}], "dimensions": [{"name": "favorite_color", "type": "categorical", "description": null, "label": null, "is_partition": false, "type_params": null, "expr": null, "metadata": null}, {"name": "created_at", "type": "time", "description": null, "label": null, "is_partition": false, "type_params": {"time_granularity": "day", "validity_params": null}, "expr": null, "metadata": null}], "metadata": null, "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "created_at": 1702489893.6660612, "config": {"enabled": true, "group": null, "meta": {}}, "unrendered_config": {}, "primary_entity": null, "group": null}}, "unit_tests": {}} +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v12.json", "dbt_version": "1.8.0b3", "generated_at": "2024-05-02T11:13:36.981553Z", "invocation_id": "05015bbc-b4d2-47f4-996f-acac2c7e1a85", "env": {}, "project_name": "test", "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, 
"send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state", "name": "my_model", "resource_type": "model", "package_name": "test", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "unique_id": "model.test.my_model", "fqn": ["test", "my_model"], "alias": "my_model", "checksum": {"name": "sha256", "checksum": "3ea0f972fa1b56aa2dc2f56ee784b6a5796312f9a813d59ae70fd8855f10d16d"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Example model", "columns": {"id": {"name": "id", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "unrendered_config": {}, "created_at": 1714648415.895201, "relation_name": "\"dbt\".\"test17146484148326086409_test_previous_version_state\".\"my_model\"", "raw_code": "select 1 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.test.metricflow_time_spine": {"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state", "name": "metricflow_time_spine", 
"resource_type": "model", "package_name": "test", "path": "metricflow_time_spine.sql", "original_file_path": "models/metricflow_time_spine.sql", "unique_id": "model.test.metricflow_time_spine", "fqn": ["test", "metricflow_time_spine"], "alias": "metricflow_time_spine", "checksum": {"name": "sha256", "checksum": "954d9b349821edb5558a373119a7d91eeac9e620aaa96cd112c0d14bab729fdb"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1714648415.362005, "relation_name": "\"dbt\".\"test17146484148326086409_test_previous_version_state\".\"metricflow_time_spine\"", "raw_code": "SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "snapshot.test.snapshot_seed": {"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state", "name": "snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "snapshot_seed.sql", "original_file_path": "snapshots/snapshot_seed.sql", "unique_id": "snapshot.test.snapshot_seed", "fqn": ["test", "snapshot_seed", "snapshot_seed"], "alias": 
"snapshot_seed", "checksum": {"name": "sha256", "checksum": "5fc998f39655f8fe52443a919e749b6e23883ef90202b040412baac13c6bfe18"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "strategy": "check", "target_schema": "test17146484148326086409_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17146484148326086409_test_previous_version_state"}, "created_at": 1714648415.444036, "relation_name": "\"dbt\".\"test17146484148326086409_test_previous_version_state\".\"snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.test.my_seed"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "analysis.test.a": {"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state", "name": "a", "resource_type": "analysis", "package_name": "test", "path": "analysis/a.sql", "original_file_path": "analyses/a.sql", "unique_id": "analysis.test.a", "fqn": ["test", "analysis", "a"], "alias": "a", "checksum": 
{"name": "sha256", "checksum": "a389c282f569f0bbdc2a8a4f174dea746c28582fdaf2048d31d9226af9feab23"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1714648415.595048, "relation_name": null, "raw_code": "select 4 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "test.test.just_my": {"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state_dbt_test__audit", "name": "just_my", "resource_type": "test", "package_name": "test", "path": "just_my.sql", "original_file_path": "tests/just_my.sql", "unique_id": "test.test.just_my", "fqn": ["test", "just_my"], "alias": "just_my", "checksum": {"name": "sha256", "checksum": "744889a2e2d9ce380619265e1217d7ccf6e6ca896c048d42ebe0f9cfb74d7156"}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["data_test_tag"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["data_test_tag"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": 
null, "build_path": null, "unrendered_config": {"tags": ["data_test_tag"]}, "created_at": 1714648415.67546, "relation_name": null, "raw_code": "{{ config(tags = ['data_test_tag']) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "seed.test.my_seed": {"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state", "name": "my_seed", "resource_type": "seed", "package_name": "test", "path": "my_seed.csv", "original_file_path": "seeds/my_seed.csv", "unique_id": "seed.test.my_seed", "fqn": ["test", "my_seed"], "alias": "my_seed", "checksum": {"name": "sha256", "checksum": "f7ede83f36165ac6b7a047aa2c3f212dff385bfa9f35f395108cd06fc8e96943"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": null}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1714648415.774666, "relation_name": "\"dbt\".\"test17146484148326086409_test_previous_version_state\".\"my_seed\"", "raw_code": "", "root_path": "/private/var/folders/7h/hj5_fw9j291c58hwfdvy5xbm0000gp/T/pytest-of-jerco/pytest-50/project0", "depends_on": {"macros": []}}, "test.test.not_null_my_model_id.43e0e9183a": 
{"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state_dbt_test__audit", "name": "not_null_my_model_id", "resource_type": "test", "package_name": "test", "path": "not_null_my_model_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.not_null_my_model_id.43e0e9183a", "fqn": ["test", "not_null_my_model_id"], "alias": "not_null_my_model_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1714648415.947873, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "id", "file_key_name": "models.my_model", "attached_node": "model.test.my_model", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "id", "model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}}, "test.test.check_nothing_my_model_.d5a5e66110": {"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state_dbt_test__audit", "name": "check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.check_nothing_my_model_.d5a5e66110", "fqn": ["test", 
"check_nothing_my_model_"], "alias": "check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1714648415.949169, "relation_name": null, "raw_code": "{{ test_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_check_nothing", "macro.dbt.get_where_subquery"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model", "test_metadata": {"name": "check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}}}, "sources": {"source.test.my_source.my_table": {"database": "dbt", "schema": "my_source", "name": "my_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.my_table", "fqn": ["test", "my_source", "my_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "my_seed", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": 
"My table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"dbt\".\"my_source\".\"my_seed\"", "created_at": 1714648416.1599889}}, "macros": {"macro.test.test_check_nothing": {"name": "test_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/dummy_test.sql", "original_file_path": "macros/dummy_test.sql", "unique_id": "macro.test.test_check_nothing", "macro_sql": "{% test check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9534872, "supported_languages": null}, "macro.test.test_disabled_check_nothing": {"name": "test_disabled_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/disabled_dummy_test.sql", "original_file_path": "macros/disabled_dummy_test.sql", "unique_id": "macro.test.test_disabled_check_nothing", "macro_sql": "{% test disabled_check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\n{{ config(enabled=False) }}\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.953828, "supported_languages": null}, "macro.test.do_nothing": {"name": "do_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/do_nothing.sql", "original_file_path": "macros/do_nothing.sql", "unique_id": "macro.test.do_nothing", "macro_sql": "{% macro do_nothing(foo2, bar2) %}\n select\n '{{ foo2 }}' as foo2,\n '{{ bar2 }}' as bar2\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": 
{"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954112, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954139, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": "postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954147, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1714648414.954156, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954165, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954171, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog_relations": {"name": "postgres__get_catalog_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog_relations", "macro_sql": "{% macro postgres__get_catalog_relations(information_schema, relations) -%}\n {%- call statement('catalog', fetch_result=True) -%}\n\n {#\n If the user has multiple databases set and the 
first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n when 'm' then 'MATERIALIZED VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n where (\n {%- for relation in relations -%}\n {%- if relation.identifier -%}\n (upper(sch.nspname) = upper('{{ relation.schema }}') and\n upper(tbl.relname) = upper('{{ relation.identifier }}'))\n {%- else-%}\n upper(sch.nspname) = upper('{{ relation.schema }}')\n {%- endif -%}\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p', 'm') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table, [m]aterialized view. 
Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954188, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n {%- set relations = [] -%}\n {%- for schema in schemas -%}\n {%- set dummy = relations.append({'schema': schema}) -%}\n {%- endfor -%}\n {{ return(postgres__get_catalog_relations(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954249, "supported_languages": null}, "macro.dbt_postgres.postgres__get_relations": {"name": "postgres__get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres__get_relations", "macro_sql": "{% macro postgres__get_relations() -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', 
fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v', 'm')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": 
"", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954271, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations() %}\n {{ return(postgres__get_relations()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9542809, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {% endif -%}\n {% if contract_config.enforced and (not temporary) -%}\n {{ get_table_columns_and_constraints() }} ;\n insert into {{ relation }} (\n {{ adapter.dispatch('get_column_names', 'dbt')() }}\n )\n {%- set sql = get_select_subquery(sql) %}\n {% else %}\n as\n {% endif %}\n (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", 
"macro.dbt.get_table_columns_and_constraints", "macro.dbt.default__get_column_names", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9543018, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9543092, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", 
"meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954317, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9543219, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9543302, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n matviewname as name,\n schemaname as schema,\n 'materialized_view' as type\n from pg_matviews\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954336, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if 
database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954345, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9543512, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": 
{"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954356, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9543638, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", 
"depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9543731, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954381, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954387, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": 
{"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9543931, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954404, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro 
postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95441, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954416, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", 
"depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954422, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_indexes_sql": {"name": "postgres__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_indexes_sql", "macro_sql": "{% macro postgres__get_show_indexes_sql(relation) %}\n select\n i.relname as name,\n m.amname as method,\n ix.indisunique as \"unique\",\n array_to_string(array_agg(a.attname), ',') as column_names\n from pg_index ix\n join pg_class i\n on i.oid = ix.indexrelid\n join pg_am m\n on m.oid=i.relam\n join pg_class t\n on t.oid = ix.indrelid\n join pg_namespace n\n on n.oid = t.relnamespace\n join pg_attribute a\n on a.attrelid = t.oid\n and a.attnum = ANY(ix.indkey)\n where t.relname = '{{ relation.identifier }}'\n and n.nspname = '{{ relation.schema }}'\n and t.relkind in ('r', 'm')\n group by 1, 2, 3\n order by 1, 2, 3\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9544299, "supported_languages": null}, "macro.dbt_postgres.postgres__get_drop_index_sql": {"name": "postgres__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_drop_index_sql", "macro_sql": "\n\n\n{%- macro postgres__get_drop_index_sql(relation, index_name) -%}\n drop index if exists \"{{ relation.schema }}\".\"{{ index_name }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1714648414.954438, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9544508, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source 
}} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954464, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_materialized_view": {"name": "postgres__drop_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_materialized_view", "macro_sql": "{% macro postgres__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954475, "supported_languages": null}, "macro.dbt_postgres.postgres__describe_materialized_view": {"name": "postgres__describe_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/describe.sql", "original_file_path": "macros/relations/materialized_view/describe.sql", "unique_id": "macro.dbt_postgres.postgres__describe_materialized_view", "macro_sql": "{% macro postgres__describe_materialized_view(relation) %}\n -- for now just get the indexes, we don't need the name or the query yet\n {% set _indexes = run_query(get_show_indexes_sql(relation)) %}\n {% do return({'indexes': _indexes}) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9544861, "supported_languages": null}, "macro.dbt_postgres.postgres__refresh_materialized_view": {"name": 
"postgres__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt_postgres.postgres__refresh_materialized_view", "macro_sql": "{% macro postgres__refresh_materialized_view(relation) %}\n refresh materialized view {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9545002, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_materialized_view_sql": {"name": "postgres__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_materialized_view_sql", "macro_sql": "{% macro postgres__get_rename_materialized_view_sql(relation, new_name) %}\n alter materialized view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9545112, "supported_languages": null}, "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql": {"name": "postgres__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n\n -- apply a full refresh 
immediately if needed\n {% if configuration_changes.requires_full_refresh %}\n\n {{ get_replace_sql(existing_relation, relation, sql) }}\n\n -- otherwise apply individual changes as needed\n {% else %}\n\n {{ postgres__update_indexes_on_materialized_view(relation, configuration_changes.indexes) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_sql", "macro.dbt_postgres.postgres__update_indexes_on_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954525, "supported_languages": null}, "macro.dbt_postgres.postgres__update_indexes_on_materialized_view": {"name": "postgres__update_indexes_on_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__update_indexes_on_materialized_view", "macro_sql": "\n\n\n{%- macro postgres__update_indexes_on_materialized_view(relation, index_changes) -%}\n {{- log(\"Applying UPDATE INDEXES to: \" ~ relation) -}}\n\n {%- for _index_change in index_changes -%}\n {%- set _index = _index_change.context -%}\n\n {%- if _index_change.action == \"drop\" -%}\n\n {{ postgres__get_drop_index_sql(relation, _index.name) }};\n\n {%- elif _index_change.action == \"create\" -%}\n\n {{ postgres__get_create_index_sql(relation, _index.as_node_config) }}\n\n {%- endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql", "macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95453, "supported_languages": null}, "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes": {"name": 
"postgres__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes", "macro_sql": "{% macro postgres__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {% set _existing_materialized_view = postgres__describe_materialized_view(existing_relation) %}\n {% set _configuration_changes = existing_relation.get_materialized_view_config_change_collection(_existing_materialized_view, new_config.model) %}\n {% do return(_configuration_changes) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__describe_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9545379, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql": {"name": "postgres__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_create_materialized_view_as_sql(relation, sql) %}\n create materialized view if not exists {{ relation }} as {{ sql }};\n\n {% for _index_dict in config.get('indexes', []) -%}\n {{- get_create_index_sql(relation, _index_dict) -}}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954549, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_table": {"name": 
"postgres__drop_table", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_table", "macro_sql": "{% macro postgres__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9545631, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_table_sql": {"name": "postgres__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_table_sql", "macro_sql": "{% macro postgres__get_replace_table_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95458, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_table_sql": {"name": "postgres__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": 
"macro.dbt_postgres.postgres__get_rename_table_sql", "macro_sql": "{% macro postgres__get_rename_table_sql(relation, new_name) %}\n alter table {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954592, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_view": {"name": "postgres__drop_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_view", "macro_sql": "{% macro postgres__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954602, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_view_sql": {"name": "postgres__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_view_sql", "macro_sql": "{% macro postgres__get_replace_view_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954616, "supported_languages": null}, 
"macro.dbt_postgres.postgres__get_rename_view_sql": {"name": "postgres__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_view_sql", "macro_sql": "{% macro postgres__get_rename_view_sql(relation, new_name) %}\n alter view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954626, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"name": "postgres__dateadd", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_postgres.postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9546409, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"name": "postgres__listagg", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_postgres.postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ 
order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954655, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"name": "postgres__datediff", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_postgres.postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, 
second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954672, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"name": "postgres__any_value", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_postgres.postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954685, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"name": "postgres__last_day", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt_postgres.postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support 
quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954695, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"name": "postgres__split_part", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_postgres.postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954706, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% 
call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954721, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954727, "supported_languages": null}, "macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954734, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954742, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954747, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954758, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": 
null, "arguments": [], "created_at": 1714648414.9547641, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954772, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954792, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- 
set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9548008, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' 
in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9548218, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954828, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": 
"macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954836, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. 
We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9548512, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954856, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro 
default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954865, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n {#-- N.B. 
The whitespace below is necessary to avoid edge case issue with comments --#}\n {#-- See: https://github.com/dbt-labs/dbt-core/issues/6781 --#}\n select {{ check_cols_config | join(', ') }} from (\n {{ node['compiled_code'] }}\n ) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954873, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = 
snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9548821, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9549062, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954915, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954925, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954931, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954937, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954944, "supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954953, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if 
strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9549599, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9549649, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ 
strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9549701, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9550028, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) 
-%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 
'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9550211, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- 
materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% set store_failures_as = config.get('store_failures_as') %}\n -- if `--store-failures` is invoked via command line and `store_failures_as` is not set,\n -- config.get('store_failures_as', 'table') returns None, not 'table'\n {% if store_failures_as == none %}{% set store_failures_as = 'table' %}{% endif %}\n {% if store_failures_as not in ['table', 'view'] %}\n {{ exceptions.raise_compiler_error(\n \"'\" ~ store_failures_as ~ \"' is not a valid value for `store_failures_as`. \"\n \"Accepted values are: ['ephemeral', 'table', 'view']\"\n ) }}\n {% endif %}\n\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type=store_failures_as) -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ get_create_sql(target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.get_create_sql", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1714648414.95505, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955074, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955084, "supported_languages": null}, "macro.dbt.get_unit_test_sql": {"name": "get_unit_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_unit_test_sql", "macro_sql": "{% macro get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%}\n {{ 
adapter.dispatch('get_unit_test_sql', 'dbt')(main_sql, expected_fixture_sql, expected_column_names) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_unit_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955091, "supported_languages": null}, "macro.dbt.default__get_unit_test_sql": {"name": "default__get_unit_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_unit_test_sql", "macro_sql": "{% macro default__get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%}\n-- Build actual result given inputs\nwith dbt_internal_unit_test_actual as (\n select\n {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%},{% endif %}{%- endfor -%}, {{ dbt.string_literal(\"actual\") }} as {{ adapter.quote(\"actual_or_expected\") }}\n from (\n {{ main_sql }}\n ) _dbt_internal_unit_test_actual\n),\n-- Build expected result\ndbt_internal_unit_test_expected as (\n select\n {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%}, {% endif %}{%- endfor -%}, {{ dbt.string_literal(\"expected\") }} as {{ adapter.quote(\"actual_or_expected\") }}\n from (\n {{ expected_fixture_sql }}\n ) _dbt_internal_unit_test_expected\n)\n-- Union actual and expected results\nselect * from dbt_internal_unit_test_actual\nunion all\nselect * from dbt_internal_unit_test_expected\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9550982, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955111, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955116, "supported_languages": null}, "macro.dbt.materialization_unit_default": {"name": "materialization_unit_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/unit.sql", "original_file_path": "macros/materializations/tests/unit.sql", "unique_id": "macro.dbt.materialization_unit_default", "macro_sql": "{%- materialization unit, default -%}\n\n {% set relations = [] %}\n\n {% set expected_rows = config.get('expected_rows') %}\n {% set expected_sql = config.get('expected_sql') %}\n {% set tested_expected_column_names = 
expected_rows[0].keys() if (expected_rows | length ) > 0 else get_columns_in_query(sql) %} %}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {% do run_query(get_create_table_as_sql(True, temp_relation, get_empty_subquery_sql(sql))) %}\n {%- set columns_in_relation = adapter.get_columns_in_relation(temp_relation) -%}\n {%- set column_name_to_data_types = {} -%}\n {%- for column in columns_in_relation -%}\n {%- do column_name_to_data_types.update({column.name|lower: column.data_type}) -%}\n {%- endfor -%}\n\n {% if not expected_sql %}\n {% set expected_sql = get_expected_sql(expected_rows, column_name_to_data_types) %}\n {% endif %}\n {% set unit_test_sql = get_unit_test_sql(sql, expected_sql, tested_expected_column_names) %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ unit_test_sql }}\n\n {%- endcall %}\n\n {% do adapter.drop_relation(temp_relation) %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query", "macro.dbt.make_temp_relation", "macro.dbt.run_query", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_empty_subquery_sql", "macro.dbt.get_expected_sql", "macro.dbt.get_unit_test_sql", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955128, "supported_languages": ["sql"]}, "macro.dbt.materialization_materialized_view_default": {"name": "materialization_materialized_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialization_materialized_view_default", "macro_sql": "{% materialization materialized_view, default %}\n {% set existing_relation = load_cached_relation(this) %}\n {% set 
target_relation = this.incorporate(type=this.MaterializedView) %}\n {% set intermediate_relation = make_intermediate_relation(target_relation) %}\n {% set backup_relation_type = target_relation.MaterializedView if existing_relation is none else existing_relation.type %}\n {% set backup_relation = make_backup_relation(target_relation, backup_relation_type) %}\n\n {{ materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) }}\n\n {% set build_sql = materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% if build_sql == '' %}\n {{ materialized_view_execute_no_op(target_relation) }}\n {% else %}\n {{ materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) }}\n {% endif %}\n\n {{ materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.materialized_view_setup", "macro.dbt.materialized_view_get_build_sql", "macro.dbt.materialized_view_execute_no_op", "macro.dbt.materialized_view_execute_build_sql", "macro.dbt.materialized_view_teardown"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9551501, "supported_languages": ["sql"]}, "macro.dbt.materialized_view_setup": {"name": "materialized_view_setup", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_setup", "macro_sql": "{% macro materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) %}\n\n -- backup_relation and intermediate_relation should not already exist in the 
database\n -- it's possible these exist because of a previous run that exited unexpectedly\n {% set preexisting_backup_relation = load_cached_relation(backup_relation) %}\n {% set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9551601, "supported_languages": null}, "macro.dbt.materialized_view_teardown": {"name": "materialized_view_teardown", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_teardown", "macro_sql": "{% macro materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) %}\n\n -- drop the temp relations if they exist to leave the database clean for the next run\n {{ drop_relation_if_exists(backup_relation) }}\n {{ drop_relation_if_exists(intermediate_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955165, "supported_languages": null}, "macro.dbt.materialized_view_get_build_sql": {"name": "materialized_view_get_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", 
"original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_get_build_sql", "macro_sql": "{% macro materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% set full_refresh_mode = should_full_refresh() %}\n\n -- determine the scenario we're in: create, full_refresh, alter, refresh data\n {% if existing_relation is none %}\n {% set build_sql = get_create_materialized_view_as_sql(target_relation, sql) %}\n {% elif full_refresh_mode or not existing_relation.is_materialized_view %}\n {% set build_sql = get_replace_sql(existing_relation, target_relation, sql) %}\n {% else %}\n\n -- get config options\n {% set on_configuration_change = config.get('on_configuration_change') %}\n {% set configuration_changes = get_materialized_view_configuration_changes(existing_relation, config) %}\n\n {% if configuration_changes is none %}\n {% set build_sql = refresh_materialized_view(target_relation) %}\n\n {% elif on_configuration_change == 'apply' %}\n {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% elif on_configuration_change == 'continue' %}\n {% set build_sql = '' %}\n {{ exceptions.warn(\"Configuration changes were identified and `on_configuration_change` was set to `continue` for `\" ~ target_relation ~ \"`\") }}\n {% elif on_configuration_change == 'fail' %}\n {{ exceptions.raise_fail_fast_error(\"Configuration changes were identified and `on_configuration_change` was set to `fail` for `\" ~ target_relation ~ \"`\") }}\n\n {% else %}\n -- this only happens if the user provides a value other than `apply`, 'skip', 'fail'\n {{ exceptions.raise_compiler_error(\"Unexpected configuration scenario\") }}\n\n {% endif %}\n\n {% endif %}\n\n {% do return(build_sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", 
"macro.dbt.get_create_materialized_view_as_sql", "macro.dbt.get_replace_sql", "macro.dbt.get_materialized_view_configuration_changes", "macro.dbt.refresh_materialized_view", "macro.dbt.get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955171, "supported_languages": null}, "macro.dbt.materialized_view_execute_no_op": {"name": "materialized_view_execute_no_op", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_no_op", "macro_sql": "{% macro materialized_view_execute_no_op(target_relation) %}\n {% do store_raw_result(\n name=\"main\",\n message=\"skip \" ~ target_relation,\n code=\"skip\",\n rows_affected=\"-1\"\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955182, "supported_languages": null}, "macro.dbt.materialized_view_execute_build_sql": {"name": "materialized_view_execute_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_build_sql", "macro_sql": "{% macro materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) %}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set grant_config = config.get('grants') %}\n\n {% call statement(name=\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, 
should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9551911, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view.sql", "original_file_path": "macros/materializations/models/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. 
Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. 
*/\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9552078, "supported_languages": ["sql"]}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table.sql", "original_file_path": "macros/materializations/models/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. 
Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. 
*/\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955231, "supported_languages": ["sql"]}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ 
return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9552588, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955266, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 
'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955271, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955279, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. 
Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9552839, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955302, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro 
default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955311, "supported_languages": null}, 
"macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955322, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for 
predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955331, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955342, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider 
including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955348, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9553668, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": 
"dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955382, "supported_languages": null}, "macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9553878, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": 
{"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9553962, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955402, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9554088, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": 
"default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955414, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955422, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": 
"macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9554272, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955432, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9554381, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955447, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = 
(should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. 
Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": 
["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95547, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' 
% (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9554932, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New 
column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955505, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types 
changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955515, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) 
%}\n\n {% endif %}\n\n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955522, "supported_languages": null}, "macro.dbt.can_clone_table": {"name": "can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.can_clone_table", "macro_sql": "{% macro can_clone_table() %}\n {{ return(adapter.dispatch('can_clone_table', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__can_clone_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955536, "supported_languages": null}, "macro.dbt.default__can_clone_table": {"name": "default__can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.default__can_clone_table", "macro_sql": "{% macro default__can_clone_table() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955542, "supported_languages": null}, "macro.dbt.create_or_replace_clone": {"name": "create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": 
"macro.dbt.create_or_replace_clone", "macro_sql": "{% macro create_or_replace_clone(this_relation, defer_relation) %}\n {{ return(adapter.dispatch('create_or_replace_clone', 'dbt')(this_relation, defer_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_or_replace_clone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955552, "supported_languages": null}, "macro.dbt.default__create_or_replace_clone": {"name": "default__create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.default__create_or_replace_clone", "macro_sql": "{% macro default__create_or_replace_clone(this_relation, defer_relation) %}\n create or replace table {{ this_relation }} clone {{ defer_relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95556, "supported_languages": null}, "macro.dbt.materialization_clone_default": {"name": "materialization_clone_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/clone.sql", "original_file_path": "macros/materializations/models/clone/clone.sql", "unique_id": "macro.dbt.materialization_clone_default", "macro_sql": "{%- materialization clone, default -%}\n\n {%- set relations = {'relations': []} -%}\n\n {%- if not defer_relation -%}\n -- nothing to do\n {{ log(\"No relation found in state manifest for \" ~ model.unique_id, info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n\n {%- if existing_relation and not flags.FULL_REFRESH -%}\n -- noop!\n {{ log(\"Relation \" ~ 
existing_relation ~ \" already exists\", info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set other_existing_relation = load_cached_relation(defer_relation) -%}\n\n -- If this is a database that can do zero-copy cloning of tables, and the other relation is a table, then this will be a table\n -- Otherwise, this will be a view\n\n {% set can_clone_table = can_clone_table() %}\n\n {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {% if existing_relation is not none and not existing_relation.is_table %}\n {{ log(\"Dropping relation \" ~ existing_relation ~ \" because it is of type \" ~ existing_relation.type) }}\n {{ drop_relation_if_exists(existing_relation) }}\n {% endif %}\n\n -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace'\n {% call statement('main') %}\n {% if target_relation and defer_relation and target_relation == defer_relation %}\n {{ log(\"Target relation and defer relation are the same, skipping clone for relation: \" ~ target_relation) }}\n {% else %}\n {{ create_or_replace_clone(target_relation, defer_relation) }}\n {% endif %}\n\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n {%- else -%}\n\n {%- set target_relation = this.incorporate(type='view') -%}\n\n -- reuse the view materialization\n -- TODO: support actual dispatch for materialization macros\n -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799\n {% set search_name = \"materialization_view_\" ~ adapter.type() %}\n {% if not search_name in context %}\n {% set search_name = \"materialization_view_default\" %}\n {% endif %}\n {% set materialization_macro = 
context[search_name] %}\n {% set relations = materialization_macro() %}\n {{ return(relations) }}\n\n {%- endif -%}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.can_clone_table", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_or_replace_clone", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9555771, "supported_languages": ["sql"]}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, 
old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9555998, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) 
}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955629, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955638, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, 
old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9556448, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9556532, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955659, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955667, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955682, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955688, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": 
"macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955693, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9557, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9557052, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955714, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", 
"macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955719, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955734, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name -%}\n\n {{ custom_alias_name | trim }}\n\n {%- elif node.version -%}\n\n {{ return(node.name ~ \"_v\" ~ (node.version | replace(\".\", \"_\"))) }}\n\n {%- else -%}\n\n {{ node.name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955761, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": 
"macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955776, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955782, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", 
"depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955789, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955803, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": "default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955809, "supported_languages": null}, "macro.dbt.get_drop_sql": {"name": "get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": 
"macro.dbt.get_drop_sql", "macro_sql": "{%- macro get_drop_sql(relation) -%}\n {{- log('Applying DROP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95582, "supported_languages": null}, "macro.dbt.default__get_drop_sql": {"name": "default__get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__get_drop_sql", "macro_sql": "{%- macro default__get_drop_sql(relation) -%}\n\n {%- if relation.is_view -%}\n {{ drop_view(relation) }}\n\n {%- elif relation.is_table -%}\n {{ drop_table(relation) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ drop_materialized_view(relation) }}\n\n {%- else -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.drop_view", "macro.dbt.drop_table", "macro.dbt.drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955827, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955837, "supported_languages": null}, 
"macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n {{ get_drop_sql(relation) }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955842, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955847, "supported_languages": null}, "macro.dbt.get_replace_sql": {"name": "get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.get_replace_sql", "macro_sql": "{% macro get_replace_sql(existing_relation, target_relation, sql) %}\n {{- log('Applying REPLACE to: ' ~ existing_relation) -}}\n {{- adapter.dispatch('get_replace_sql', 'dbt')(existing_relation, target_relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1714648414.955864, "supported_languages": null}, "macro.dbt.default__get_replace_sql": {"name": "default__get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.default__get_replace_sql", "macro_sql": "{% macro default__get_replace_sql(existing_relation, target_relation, sql) %}\n\n {# /* use a create or replace statement if possible */ #}\n\n {% set is_replaceable = existing_relation.type == target_relation_type and existing_relation.can_be_replaced %}\n\n {% if is_replaceable and existing_relation.is_view %}\n {{ get_replace_view_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_table %}\n {{ get_replace_table_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_materialized_view %}\n {{ get_replace_materialized_view_sql(target_relation, sql) }}\n\n {# /* a create or replace statement is not possible, so try to stage and/or backup to be safe */ #}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one using a backup */ #}\n {%- elif target_relation.can_be_renamed and existing_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one without using a backup */ #}\n {%- elif target_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_drop_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }}\n\n {# /* create target_relation in place by first backing up the existing relation */ #}\n {%- elif existing_relation.can_be_renamed -%}\n {{ get_create_backup_sql(existing_relation) }};\n 
{{ get_create_sql(target_relation, sql) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* no renaming is allowed, so just drop and create */ #}\n {%- else -%}\n {{ get_drop_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_view_sql", "macro.dbt.get_replace_table_sql", "macro.dbt.get_replace_materialized_view_sql", "macro.dbt.get_create_intermediate_sql", "macro.dbt.get_create_backup_sql", "macro.dbt.get_rename_intermediate_sql", "macro.dbt.get_drop_backup_sql", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955874, "supported_languages": null}, "macro.dbt.get_create_intermediate_sql": {"name": "get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.get_create_intermediate_sql", "macro_sql": "{%- macro get_create_intermediate_sql(relation, sql) -%}\n {{- log('Applying CREATE INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_intermediate_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955892, "supported_languages": null}, "macro.dbt.default__get_create_intermediate_sql": {"name": "default__get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.default__get_create_intermediate_sql", "macro_sql": "{%- macro 
default__get_create_intermediate_sql(relation, sql) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n -- drop any pre-existing intermediate\n {{ get_drop_sql(intermediate_relation) }};\n\n {{ get_create_sql(intermediate_relation, sql) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955897, "supported_languages": null}, "macro.dbt.drop_schema_named": {"name": "drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.drop_schema_named", "macro_sql": "{% macro drop_schema_named(schema_name) %}\n {{ return(adapter.dispatch('drop_schema_named', 'dbt') (schema_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_schema_named"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955909, "supported_languages": null}, "macro.dbt.default__drop_schema_named": {"name": "default__drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.default__drop_schema_named", "macro_sql": "{% macro default__drop_schema_named(schema_name) %}\n {% set schema_relation = api.Relation.create(schema=schema_name) %}\n {{ adapter.drop_schema(schema_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9559171, "supported_languages": null}, "macro.dbt.get_drop_backup_sql": {"name": 
"get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.get_drop_backup_sql", "macro_sql": "{%- macro get_drop_backup_sql(relation) -%}\n {{- log('Applying DROP BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95593, "supported_languages": null}, "macro.dbt.default__get_drop_backup_sql": {"name": "default__get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.default__get_drop_backup_sql", "macro_sql": "{%- macro default__get_drop_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n {{ get_drop_sql(backup_relation) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955936, "supported_languages": null}, "macro.dbt.get_rename_sql": {"name": "get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.get_rename_sql", "macro_sql": "{%- macro get_rename_sql(relation, new_name) -%}\n {{- log('Applying RENAME to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_sql', 'dbt')(relation, new_name) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_sql"]}, "description": "", 
"meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955948, "supported_languages": null}, "macro.dbt.default__get_rename_sql": {"name": "default__get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__get_rename_sql", "macro_sql": "{%- macro default__get_rename_sql(relation, new_name) -%}\n\n {%- if relation.is_view -%}\n {{ get_rename_view_sql(relation, new_name) }}\n\n {%- elif relation.is_table -%}\n {{ get_rename_table_sql(relation, new_name) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_rename_materialized_view_sql(relation, new_name) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_rename_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.get_rename_view_sql", "macro.dbt.get_rename_table_sql", "macro.dbt.get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955955, "supported_languages": null}, "macro.dbt.rename_relation": {"name": "rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955966, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955971, "supported_languages": null}, "macro.dbt.get_create_backup_sql": {"name": "get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.get_create_backup_sql", "macro_sql": "{%- macro get_create_backup_sql(relation) -%}\n {{- log('Applying CREATE BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955989, "supported_languages": null}, "macro.dbt.default__get_create_backup_sql": {"name": "default__get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.default__get_create_backup_sql", "macro_sql": "{%- macro default__get_create_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n -- drop any pre-existing backup\n {{ 
get_drop_sql(backup_relation) }};\n\n {{ get_rename_sql(relation, backup_relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9559941, "supported_languages": null}, "macro.dbt.get_create_sql": {"name": "get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.get_create_sql", "macro_sql": "{%- macro get_create_sql(relation, sql) -%}\n {{- log('Applying CREATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956007, "supported_languages": null}, "macro.dbt.default__get_create_sql": {"name": "default__get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.default__get_create_sql", "macro_sql": "{%- macro default__get_create_sql(relation, sql) -%}\n\n {%- if relation.is_view -%}\n {{ get_create_view_as_sql(relation, sql) }}\n\n {%- elif relation.is_table -%}\n {{ get_create_table_as_sql(False, relation, sql) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_create_materialized_view_as_sql(relation, sql) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_create_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.get_create_view_as_sql", "macro.dbt.get_create_table_as_sql", 
"macro.dbt.get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956013, "supported_languages": null}, "macro.dbt.get_rename_intermediate_sql": {"name": "get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.get_rename_intermediate_sql", "macro_sql": "{%- macro get_rename_intermediate_sql(relation) -%}\n {{- log('Applying RENAME INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_intermediate_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956028, "supported_languages": null}, "macro.dbt.default__get_rename_intermediate_sql": {"name": "default__get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.default__get_rename_intermediate_sql", "macro_sql": "{%- macro default__get_rename_intermediate_sql(relation) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n {{ get_rename_sql(intermediate_relation, relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956036, "supported_languages": null}, "macro.dbt.drop_materialized_view": {"name": "drop_materialized_view", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.drop_materialized_view", "macro_sql": "{% macro drop_materialized_view(relation) -%}\n {{- adapter.dispatch('drop_materialized_view', 'dbt')(relation) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956047, "supported_languages": null}, "macro.dbt.default__drop_materialized_view": {"name": "default__drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.default__drop_materialized_view", "macro_sql": "{% macro default__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956052, "supported_languages": null}, "macro.dbt.get_replace_materialized_view_sql": {"name": "get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.get_replace_materialized_view_sql", "macro_sql": "{% macro get_replace_materialized_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_materialized_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1714648414.9560661, "supported_languages": null}, "macro.dbt.default__get_replace_materialized_view_sql": {"name": "default__get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.default__get_replace_materialized_view_sql", "macro_sql": "{% macro default__get_replace_materialized_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956071, "supported_languages": null}, "macro.dbt.refresh_materialized_view": {"name": "refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.refresh_materialized_view", "macro_sql": "{% macro refresh_materialized_view(relation) %}\n {{- log('Applying REFRESH to: ' ~ relation) -}}\n {{- adapter.dispatch('refresh_materialized_view', 'dbt')(relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__refresh_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956081, "supported_languages": null}, "macro.dbt.default__refresh_materialized_view": {"name": "default__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.default__refresh_materialized_view", "macro_sql": "{% macro 
default__refresh_materialized_view(relation) %}\n {{ exceptions.raise_compiler_error(\"`refresh_materialized_view` has not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956087, "supported_languages": null}, "macro.dbt.get_rename_materialized_view_sql": {"name": "get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.get_rename_materialized_view_sql", "macro_sql": "{% macro get_rename_materialized_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_materialized_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9560971, "supported_languages": null}, "macro.dbt.default__get_rename_materialized_view_sql": {"name": "default__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.default__get_rename_materialized_view_sql", "macro_sql": "{% macro default__get_rename_materialized_view_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9561028, "supported_languages": null}, 
"macro.dbt.get_alter_materialized_view_as_sql": {"name": "get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_alter_materialized_view_as_sql", "macro_sql": "{% macro get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{- log('Applying ALTER to: ' ~ relation) -}}\n {{- adapter.dispatch('get_alter_materialized_view_as_sql', 'dbt')(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n ) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956117, "supported_languages": null}, "macro.dbt.default__get_alter_materialized_view_as_sql": {"name": "default__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_alter_materialized_view_as_sql", "macro_sql": "{% macro default__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9561229, "supported_languages": null}, "macro.dbt.get_materialized_view_configuration_changes": {"name": 
"get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_materialized_view_configuration_changes", "macro_sql": "{% macro get_materialized_view_configuration_changes(existing_relation, new_config) %}\n /* {#\n It's recommended that configuration changes be formatted as follows:\n {\"\": [{\"action\": \"\", \"context\": ...}]}\n\n For example:\n {\n \"indexes\": [\n {\"action\": \"drop\", \"context\": \"index_abc\"},\n {\"action\": \"create\", \"context\": {\"columns\": [\"column_1\", \"column_2\"], \"type\": \"hash\", \"unique\": True}},\n ],\n }\n\n Either way, `get_materialized_view_configuration_changes` needs to align with `get_alter_materialized_view_as_sql`.\n #} */\n {{- log('Determining configuration changes on: ' ~ existing_relation) -}}\n {%- do return(adapter.dispatch('get_materialized_view_configuration_changes', 'dbt')(existing_relation, new_config)) -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_materialized_view_configuration_changes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956128, "supported_languages": null}, "macro.dbt.default__get_materialized_view_configuration_changes": {"name": "default__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_materialized_view_configuration_changes", "macro_sql": "{% macro default__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", 
"depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9561338, "supported_languages": null}, "macro.dbt.get_create_materialized_view_as_sql": {"name": "get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.get_create_materialized_view_as_sql", "macro_sql": "{% macro get_create_materialized_view_as_sql(relation, sql) -%}\n {{- adapter.dispatch('get_create_materialized_view_as_sql', 'dbt')(relation, sql) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9561448, "supported_languages": null}, "macro.dbt.default__get_create_materialized_view_as_sql": {"name": "default__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.default__get_create_materialized_view_as_sql", "macro_sql": "{% macro default__get_create_materialized_view_as_sql(relation, sql) -%}\n {{ exceptions.raise_compiler_error(\n \"`get_create_materialized_view_as_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956152, "supported_languages": null}, "macro.dbt.get_table_columns_and_constraints": {"name": "get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_table_columns_and_constraints", "macro_sql": "{%- macro get_table_columns_and_constraints() -%}\n {{ adapter.dispatch('get_table_columns_and_constraints', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956168, "supported_languages": null}, "macro.dbt.default__get_table_columns_and_constraints": {"name": "default__get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_table_columns_and_constraints", "macro_sql": "{% macro default__get_table_columns_and_constraints() -%}\n {{ return(table_columns_and_constraints()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956178, "supported_languages": null}, "macro.dbt.table_columns_and_constraints": {"name": "table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.table_columns_and_constraints", "macro_sql": "{% macro table_columns_and_constraints() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set raw_column_constraints = adapter.render_raw_columns_constraints(raw_columns=model['columns']) -%}\n {%- set raw_model_constraints = 
adapter.render_raw_model_constraints(raw_constraints=model['constraints']) -%}\n (\n {% for c in raw_column_constraints -%}\n {{ c }}{{ \",\" if not loop.last or raw_model_constraints }}\n {% endfor %}\n {% for c in raw_model_constraints -%}\n {{ c }}{{ \",\" if not loop.last }}\n {% endfor -%}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9561841, "supported_languages": null}, "macro.dbt.get_assert_columns_equivalent": {"name": "get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_assert_columns_equivalent", "macro_sql": "\n\n{%- macro get_assert_columns_equivalent(sql) -%}\n {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9561942, "supported_languages": null}, "macro.dbt.default__get_assert_columns_equivalent": {"name": "default__get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_assert_columns_equivalent", "macro_sql": "{% macro default__get_assert_columns_equivalent(sql) -%}\n {{ return(assert_columns_equivalent(sql)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9562, "supported_languages": null}, 
"macro.dbt.assert_columns_equivalent": {"name": "assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.assert_columns_equivalent", "macro_sql": "{% macro assert_columns_equivalent(sql) %}\n\n {#-- First ensure the user has defined 'columns' in yaml specification --#}\n {%- set user_defined_columns = model['columns'] -%}\n {%- if not user_defined_columns -%}\n {{ exceptions.raise_contract_error([], []) }}\n {%- endif -%}\n\n {#-- Obtain the column schema provided by sql file. #}\n {%- set sql_file_provided_columns = get_column_schema_from_query(sql, config.get('sql_header', none)) -%}\n {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. #}\n {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(user_defined_columns)) -%}\n\n {#-- create dictionaries with name and formatted data type and strings for exception #}\n {%- set sql_columns = format_columns(sql_file_provided_columns) -%}\n {%- set yaml_columns = format_columns(schema_file_provided_columns) -%}\n\n {%- if sql_columns|length != yaml_columns|length -%}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n\n {%- for sql_col in sql_columns -%}\n {%- set yaml_col = [] -%}\n {%- for this_col in yaml_columns -%}\n {%- if this_col['name'] == sql_col['name'] -%}\n {%- do yaml_col.append(this_col) -%}\n {%- break -%}\n {%- endif -%}\n {%- endfor -%}\n {%- if not yaml_col -%}\n {#-- Column with name not found in yaml #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%}\n {#-- Column data types don't match #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- endfor 
-%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_column_schema_from_query", "macro.dbt.get_empty_schema_sql", "macro.dbt.format_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956212, "supported_languages": null}, "macro.dbt.format_columns": {"name": "format_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.format_columns", "macro_sql": "{% macro format_columns(columns) %}\n {% set formatted_columns = [] %}\n {% for column in columns %}\n {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%}\n {%- do formatted_columns.append(formatted_column) -%}\n {% endfor %}\n {{ return(formatted_columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__format_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956223, "supported_languages": null}, "macro.dbt.default__format_column": {"name": "default__format_column", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__format_column", "macro_sql": "{% macro default__format_column(column) -%}\n {% set data_type = column.dtype %}\n {% set formatted = column.column.lower() ~ \" \" ~ data_type %}\n {{ return({'name': column.name, 'data_type': data_type, 'formatted': formatted}) }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95623, "supported_languages": null}, "macro.dbt.drop_table": {"name": "drop_table", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.drop_table", "macro_sql": "{% macro drop_table(relation) -%}\n {{- adapter.dispatch('drop_table', 'dbt')(relation) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9562418, "supported_languages": null}, "macro.dbt.default__drop_table": {"name": "default__drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.default__drop_table", "macro_sql": "{% macro default__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956247, "supported_languages": null}, "macro.dbt.get_replace_table_sql": {"name": "get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.get_replace_table_sql", "macro_sql": "{% macro get_replace_table_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_table_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956259, "supported_languages": null}, "macro.dbt.default__get_replace_table_sql": {"name": "default__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", 
"original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.default__get_replace_table_sql", "macro_sql": "{% macro default__get_replace_table_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956268, "supported_languages": null}, "macro.dbt.get_rename_table_sql": {"name": "get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.get_rename_table_sql", "macro_sql": "{% macro get_rename_table_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_table_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95628, "supported_languages": null}, "macro.dbt.default__get_rename_table_sql": {"name": "default__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.default__get_rename_table_sql", "macro_sql": "{% macro default__get_rename_table_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956286, "supported_languages": null}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956301, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956308, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif 
%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956314, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced and (not temporary) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956325, "supported_languages": null}, "macro.dbt.default__get_column_names": {"name": "default__get_column_names", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_column_names", "macro_sql": "{% macro default__get_column_names() %}\n {#- loop through user_provided_columns to get column names -#}\n {%- set user_provided_columns = model['columns'] -%}\n {%- for i in user_provided_columns %}\n {%- set 
col = user_provided_columns[i] -%}\n {%- set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] -%}\n {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95633, "supported_languages": null}, "macro.dbt.get_select_subquery": {"name": "get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_select_subquery", "macro_sql": "{% macro get_select_subquery(sql) %}\n {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956337, "supported_languages": null}, "macro.dbt.default__get_select_subquery": {"name": "default__get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_select_subquery", "macro_sql": "{% macro default__get_select_subquery(sql) %}\n select {{ adapter.dispatch('get_column_names', 'dbt')() }}\n from (\n {{ sql }}\n ) as model_subq\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_column_names"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956345, "supported_languages": null}, "macro.dbt.drop_view": {"name": "drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.drop_view", 
"macro_sql": "{% macro drop_view(relation) -%}\n {{- adapter.dispatch('drop_view', 'dbt')(relation) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956357, "supported_languages": null}, "macro.dbt.default__drop_view": {"name": "default__drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.default__drop_view", "macro_sql": "{% macro default__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956364, "supported_languages": null}, "macro.dbt.get_replace_view_sql": {"name": "get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.get_replace_view_sql", "macro_sql": "{% macro get_replace_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956377, "supported_languages": null}, "macro.dbt.default__get_replace_view_sql": {"name": "default__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__get_replace_view_sql", "macro_sql": "{% macro default__get_replace_view_sql(relation, sql) %}\n {{ 
exceptions.raise_compiler_error(\n \"`get_replace_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956382, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. 
This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956392, "supported_languages": null}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9564018, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": 
"macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956948, "supported_languages": null}, "macro.dbt.get_rename_view_sql": {"name": "get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.get_rename_view_sql", "macro_sql": "{% macro get_rename_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956962, "supported_languages": null}, "macro.dbt.default__get_rename_view_sql": {"name": "default__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.default__get_rename_view_sql", "macro_sql": "{% macro default__get_rename_view_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9569669, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", 
"path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956979, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956987, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956994, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9570198, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": 
"{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9570298, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957046, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% 
endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957057, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? 
--#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957077, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957084, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1714648414.957089, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9571042, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n 
{% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957112, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. 
Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957119, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957124, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9571369, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1714648414.957142, "supported_languages": null}, "macro.dbt.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957155, "supported_languages": null}, "macro.dbt.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957164, "supported_languages": null}, "macro.dbt.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", 
"unique_id": "macro.dbt.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957172, "supported_languages": null}, "macro.dbt.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n {# call as follows:\n\n date_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n ) #}\n\n\n with rawdata as (\n\n {{dbt.generate_series(\n dbt.get_intervals_between(start_date, end_date, datepart)\n )}}\n\n ),\n\n all_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"row_number() over (order by 1) - 1\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n ),\n\n filtered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n )\n\n select * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.generate_series", "macro.dbt.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95718, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, 
new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957191, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9571981, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957212, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957218, "supported_languages": null}, "macro.dbt.get_powers_of_two": 
{"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9572291, "supported_languages": null}, "macro.dbt.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957238, "supported_languages": null}, "macro.dbt.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1714648414.957244, "supported_languages": null}, "macro.dbt.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95725, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957265, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", 
"macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957271, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957282, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957288, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": 
"", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957299, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9573078, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957319, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9573271, "supported_languages": 
null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95734, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957345, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957356, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957362, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957373, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "arguments": [], "created_at": 1714648414.957379, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957394, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9574, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957412, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957418, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957431, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957438, "supported_languages": null}, "macro.dbt.cast": {"name": "cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast.sql", "original_file_path": "macros/utils/cast.sql", "unique_id": "macro.dbt.cast", "macro_sql": "{% macro cast(field, type) %}\n {{ return(adapter.dispatch('cast', 
'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957448, "supported_languages": null}, "macro.dbt.default__cast": {"name": "default__cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast.sql", "original_file_path": "macros/utils/cast.sql", "unique_id": "macro.dbt.default__cast", "macro_sql": "{% macro default__cast(field, type) %}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9574552, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9574652, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9574752, "supported_languages": null}, "macro.dbt.position": {"name": 
"position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9574862, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957491, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957501, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": 
"macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9575071, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95753, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957543, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": 
"", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957549, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957555, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957561, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957567, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957576, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957581, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957587, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro 
default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957592, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95762, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957628, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1714648414.957634, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957639, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95765, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9576561, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957667, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9576719, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957685, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', 
dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9576929, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9577, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957712, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ 
part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9577181, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 + {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9577239, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957737, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": 
{"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9577441, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9577558, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957767, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95778, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957788, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957799, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957807, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957813, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957818, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9578319, 
"supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957838, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957844, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957852, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": 
{"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9578571, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957862, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957868, 
"supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9578738, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957889, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": 
{"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957897, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9579039, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95791, "supported_languages": null}, "macro.dbt.get_drop_index_sql": {"name": "get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_drop_index_sql", "macro_sql": "{% macro get_drop_index_sql(relation, index_name) -%}\n {{ adapter.dispatch('get_drop_index_sql', 'dbt')(relation, 
index_name) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957916, "supported_languages": null}, "macro.dbt.default__get_drop_index_sql": {"name": "default__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_drop_index_sql", "macro_sql": "{% macro default__get_drop_index_sql(relation, index_name) -%}\n {{ exceptions.raise_compiler_error(\"`get_drop_index_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957922, "supported_languages": null}, "macro.dbt.get_show_indexes_sql": {"name": "get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_show_indexes_sql", "macro_sql": "{% macro get_show_indexes_sql(relation) -%}\n {{ adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957928, "supported_languages": null}, "macro.dbt.default__get_show_indexes_sql": {"name": "default__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_show_indexes_sql", "macro_sql": "{% macro default__get_show_indexes_sql(relation) -%}\n {{ 
exceptions.raise_compiler_error(\"`get_show_indexes_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957933, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957949, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9579551, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", 
"unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9579608, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9579709, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957977, "supported_languages": 
null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957982, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9579918, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957998, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958005, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9580119, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958019, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958025, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958039, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", 
"original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958045, "supported_languages": null}, "macro.dbt.validate_sql": {"name": "validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.validate_sql", "macro_sql": "{% macro validate_sql(sql) -%}\n {{ return(adapter.dispatch('validate_sql', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__validate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958066, "supported_languages": null}, "macro.dbt.default__validate_sql": {"name": "default__validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.default__validate_sql", "macro_sql": "{% macro default__validate_sql(sql) -%}\n {% call statement('validate_sql') -%}\n explain {{ sql }}\n {% endcall %}\n {{ return(load_result('validate_sql')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1714648414.958072, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95809, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958095, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958102, 
"supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958108, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958116, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", 
"macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958121, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958127, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958132, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, 
privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9581382, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958146, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958153, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, 
grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9581592, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958167, "supported_languages": null}, "macro.dbt.call_dcl_statements": 
{"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9581718, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. 
Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958198, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9582038, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = 
diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. --#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9582121, "supported_languages": null}, "macro.dbt.get_show_sql": {"name": "get_show_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_show_sql", "macro_sql": "{% macro get_show_sql(compiled_code, sql_header, limit) -%}\n {%- if sql_header -%}\n {{ sql_header }}\n {%- endif -%}\n {%- if limit is not none -%}\n {{ get_limit_subquery_sql(compiled_code, limit) }}\n {%- else -%}\n {{ compiled_code }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958223, 
"supported_languages": null}, "macro.dbt.get_limit_subquery_sql": {"name": "get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_limit_subquery_sql", "macro_sql": "{% macro get_limit_subquery_sql(sql, limit) %}\n {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9582322, "supported_languages": null}, "macro.dbt.default__get_limit_subquery_sql": {"name": "default__get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.default__get_limit_subquery_sql", "macro_sql": "{% macro default__get_limit_subquery_sql(sql, limit) %}\n select *\n from (\n {{ sql }}\n ) as model_limit_subq\n limit {{ limit }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958237, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1714648414.9582489, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958255, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958264, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro 
%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958269, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9582741, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958279, "supported_languages": null}, "macro.dbt.get_catalog_relations": {"name": "get_catalog_relations", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog_relations", "macro_sql": "{% macro get_catalog_relations(information_schema, relations) -%}\n {{ return(adapter.dispatch('get_catalog_relations', 'dbt')(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9582999, "supported_languages": null}, "macro.dbt.default__get_catalog_relations": {"name": "default__get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog_relations", "macro_sql": "{% macro default__get_catalog_relations(information_schema, relations) -%}\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog_relations not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958306, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1714648414.958313, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9583218, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958328, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", 
"meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958336, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958343, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95836, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", 
"depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958365, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95837, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958377, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9583821, "supported_languages": null}, "macro.dbt.get_relations": {"name": "get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relations", "macro_sql": "{% macro get_relations() %}\n {{ return(adapter.dispatch('get_relations', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95839, "supported_languages": null}, "macro.dbt.default__get_relations": {"name": "default__get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relations", "macro_sql": "{% macro default__get_relations() %}\n {{ exceptions.raise_not_implemented(\n 'get_relations macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958397, "supported_languages": null}, "macro.dbt.get_relation_last_modified": {"name": "get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", 
"path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relation_last_modified", "macro_sql": "{% macro get_relation_last_modified(information_schema, relations) %}\n {{ return(adapter.dispatch('get_relation_last_modified', 'dbt')(information_schema, relations)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_relation_last_modified"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9584022, "supported_languages": null}, "macro.dbt.default__get_relation_last_modified": {"name": "default__get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relation_last_modified", "macro_sql": "{% macro default__get_relation_last_modified(information_schema, relations) %}\n {{ exceptions.raise_not_implemented(\n 'get_relation_last_modified macro not implemented for adapter ' + adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958407, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958425, 
"supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95843, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9584372, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% macro get_empty_subquery_sql(select_sql, select_sql_header=none) -%}\n {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_subquery_sql"]}, "description": "", 
"meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958444, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %}\n {%- if select_sql_header is not none -%}\n {{ select_sql_header }}\n {%- endif -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958451, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9584582, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n {%- set col_naked_numeric = [] -%}\n select\n {% 
for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {%- do col_err.append(col['name']) -%}\n {#-- If this column's type is just 'numeric' then it is missing precision/scale, raise a warning --#}\n {%- elif col['data_type'].strip().lower() in ('numeric', 'decimal', 'number') -%}\n {%- do col_naked_numeric.append(col['name']) -%}\n {%- endif -%}\n {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %}\n {{ cast('null', col['data_type']) }} as {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- elif (col_naked_numeric | length) > 0 -%}\n {{ exceptions.warn(\"Detected columns with numeric type and unspecified precision/scale, this can lead to unintended rounding: \" ~ col_naked_numeric ~ \"`\") }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958466, "supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %}\n {% set column_schema = adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": 
null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958473, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958478, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9584892, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ 
return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958498, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958503, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro 
alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9585102, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958515, "supported_languages": null}, "macro.dbt.get_fixture_sql": {"name": "get_fixture_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_fixture_sql", "macro_sql": "{% 
macro get_fixture_sql(rows, column_name_to_data_types) %}\n-- Fixture for {{ model.name }}\n{% set default_row = {} %}\n\n{%- if not column_name_to_data_types -%}\n{#-- Use defer_relation IFF it is available in the manifest and 'this' is missing from the database --#}\n{%- set this_or_defer_relation = defer_relation if (defer_relation and not load_relation(this)) else this -%}\n{%- set columns_in_relation = adapter.get_columns_in_relation(this_or_defer_relation) -%}\n\n{%- set column_name_to_data_types = {} -%}\n{%- for column in columns_in_relation -%}\n{#-- This needs to be a case-insensitive comparison --#}\n{%- do column_name_to_data_types.update({column.name|lower: column.data_type}) -%}\n{%- endfor -%}\n{%- endif -%}\n\n{%- if not column_name_to_data_types -%}\n {{ exceptions.raise_compiler_error(\"Not able to get columns for unit test '\" ~ model.name ~ \"' from relation \" ~ this) }}\n{%- endif -%}\n\n{%- for column_name, column_type in column_name_to_data_types.items() -%}\n {%- do default_row.update({column_name: (safe_cast(\"null\", column_type) | trim )}) -%}\n{%- endfor -%}\n\n\n{%- for row in rows -%}\n{%- set formatted_row = format_row(row, column_name_to_data_types) -%}\n{%- set default_row_copy = default_row.copy() -%}\n{%- do default_row_copy.update(formatted_row) -%}\nselect\n{%- for column_name, column_value in default_row_copy.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n\n{%- if (rows | length) == 0 -%}\n select\n {%- for column_name, column_value in default_row.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%},{%- endif %}\n {%- endfor %}\n limit 0\n{%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_relation", "macro.dbt.safe_cast", "macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1714648414.958533, "supported_languages": null}, "macro.dbt.get_expected_sql": {"name": "get_expected_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_expected_sql", "macro_sql": "{% macro get_expected_sql(rows, column_name_to_data_types) %}\n\n{%- if (rows | length) == 0 -%}\n select * from dbt_internal_unit_test_actual\n limit 0\n{%- else -%}\n{%- for row in rows -%}\n{%- set formatted_row = format_row(row, column_name_to_data_types) -%}\nselect\n{%- for column_name, column_value in formatted_row.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n{%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958542, "supported_languages": null}, "macro.dbt.format_row": {"name": "format_row", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.format_row", "macro_sql": "\n\n{%- macro format_row(row, column_name_to_data_types) -%}\n {#-- generate case-insensitive formatted row --#}\n {% set formatted_row = {} %}\n {%- for column_name, column_value in row.items() -%}\n {% set column_name = column_name|lower %}\n\n {%- if column_name not in column_name_to_data_types %}\n {#-- if user-provided row contains column name that relation does not contain, raise an error --#}\n {% set fixture_name = \"expected output\" if model.resource_type == 'unit_test' else (\"'\" ~ model.name ~ \"'\") %}\n {{ exceptions.raise_compiler_error(\n \"Invalid column name: '\" ~ column_name ~ \"' in unit test fixture for 
\" ~ fixture_name ~ \".\"\n \"\\nAccepted columns for \" ~ fixture_name ~ \" are: \" ~ (column_name_to_data_types.keys()|list)\n ) }}\n {%- endif -%}\n\n {%- set column_type = column_name_to_data_types[column_name] %}\n\n {#-- sanitize column_value: wrap yaml strings in quotes, apply cast --#}\n {%- set column_value_clean = column_value -%}\n {%- if column_value is string -%}\n {%- set column_value_clean = dbt.string_literal(dbt.escape_single_quotes(column_value)) -%}\n {%- elif column_value is none -%}\n {%- set column_value_clean = 'null' -%}\n {%- endif -%}\n\n {%- set row_update = {column_name: safe_cast(column_value_clean, column_type) } -%}\n {%- do formatted_row.update(row_update) -%}\n {%- endfor -%}\n {{ return(formatted_row) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.string_literal", "macro.dbt.escape_single_quotes", "macro.dbt.safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9585521, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958566, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": "default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", 
"macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958571, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %}\n {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%}\n {%- if _ref.get('version') -%}\n {% do _ref_args.extend([\"v\" ~ _ref['version']]) %}\n {%- endif -%}\n {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args, **kwargs):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n version = kwargs.get(\"v\") or kwargs.get(\"version\")\n if version:\n key += f\".v{version}\"\n dbt_load_df_function = kwargs.get(\"dbt_load_df_function\")\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9585762, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} 
-%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958584, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! 
#}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9585888, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", 
"macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958596, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958603, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95939, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1714648414.959782, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.960213, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9606311, "supported_languages": null}}, "docs": {"doc.test.somedoc": {"name": "somedoc", "resource_type": "doc", "package_name": "test", "path": "somedoc.md", "original_file_path": "models/somedoc.md", "unique_id": "doc.test.somedoc", "block_contents": "Testing, testing"}, "doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for 
your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. 
For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {"exposure.test.simple_exposure": {"name": "simple_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.simple_exposure", "fqn": ["test", "simple_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "url": null, "depends_on": {"macros": [], "nodes": ["source.test.my_source.my_table", "model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [["my_source", "my_table"]], "metrics": [], "created_at": 1714648416.054376}}, "metrics": {"metric.test.blue_customers_post_2010": {"name": "blue_customers_post_2010", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.blue_customers_post_2010", "fqn": ["test", "blue_customers_post_2010"], "description": "", "label": "Blue Customers since 2010", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ 
Dimension('id__favorite_color') }} = 'blue'"}]}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": {"where_filters": [{"where_sql_template": "{{ TimeDimension('id__created_at', 'day') }} > '2010-01-01'"}]}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null, "meta": {}}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1714648416.109668, "group": null}, "metric.test.customers": {"name": "customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.customers", "fqn": ["test", "customers"], "description": "", "label": "Customers Metric", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null, "meta": {}}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1714648416.110243, "group": null}, "metric.test.ratio_of_blue_customers_to_red_customers": {"name": "ratio_of_blue_customers_to_red_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.ratio_of_blue_customers_to_red_customers", "fqn": ["test", 
"ratio_of_blue_customers_to_red_customers"], "description": "", "label": "Very Important Customer Color Ratio", "type": "ratio", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}, "denominator": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'red'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null, "meta": {}}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1714648416.1137412, "group": null}, "metric.test.doubled_blue_customers": {"name": "doubled_blue_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.doubled_blue_customers", "fqn": ["test", "doubled_blue_customers"], "description": "", "label": "Inflated blue customer numbers", "type": "derived", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": "customers * 2", "window": null, "grain_to_date": null, "metrics": [{"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}], "conversion_type_params": null}, "filter": null, "metadata": null, "meta": 
{}, "tags": [], "config": {"enabled": true, "group": null, "meta": {}}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1714648416.114889, "group": null}}, "groups": {}, "selectors": {}, "disabled": {"model.test.disabled_model": [{"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state", "name": "disabled_model", "resource_type": "model", "package_name": "test", "path": "disabled_model.sql", "original_file_path": "models/disabled_model.sql", "unique_id": "model.test.disabled_model", "fqn": ["test", "disabled_model"], "alias": "disabled_model", "checksum": {"name": "sha256", "checksum": "597106d23ce34e3cd2430588e5c1cf474ebdd138fc47e09b925a4ab258a27acc"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"enabled": false}, "created_at": 1714648415.360441, "config_call_dict": {"enabled": false}, "relation_name": "\"dbt\".\"test17146484148326086409_test_previous_version_state\".\"disabled_model\"", "raw_code": "{{ config(enabled=False) }}\nselect 2 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, 
"latest_version": null, "deprecation_date": null, "defer_relation": null}], "snapshot.test.disabled_snapshot_seed": [{"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state", "name": "disabled_snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "disabled_snapshot_seed.sql", "original_file_path": "snapshots/disabled_snapshot_seed.sql", "unique_id": "snapshot.test.disabled_snapshot_seed", "fqn": ["test", "disabled_snapshot_seed", "disabled_snapshot_seed"], "alias": "disabled_snapshot_seed", "checksum": {"name": "sha256", "checksum": "fe76c9dd437341c9e82a0f2a8baf3148f961b768eaa0a4410cd27d3c071bd617"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "strategy": "check", "target_schema": "test17146484148326086409_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17146484148326086409_test_previous_version_state", "enabled": false}, "created_at": 1714648415.499299, "config_call_dict": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17146484148326086409_test_previous_version_state", "enabled": false}, "relation_name": "\"dbt\".\"test17146484148326086409_test_previous_version_state\".\"disabled_snapshot_seed\"", "raw_code": "\n{{\n 
config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n enabled=False,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "defer_relation": null}], "analysis.test.disabled_al": [{"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state", "name": "disabled_al", "resource_type": "analysis", "package_name": "test", "path": "analysis/disabled_al.sql", "original_file_path": "analyses/disabled_al.sql", "unique_id": "analysis.test.disabled_al", "fqn": ["test", "analysis", "disabled_al"], "alias": "disabled_al", "checksum": {"name": "sha256", "checksum": "32d36ad6cff0786eb562440ba60ef6c9b9a7f4c282dfb7a52eaf19d36370f0e1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"enabled": false}, "created_at": 1714648415.5861099, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\nselect 9 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], 
"test.test.disabled_just_my": [{"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state_dbt_test__audit", "name": "disabled_just_my", "resource_type": "test", "package_name": "test", "path": "disabled_just_my.sql", "original_file_path": "tests/disabled_just_my.sql", "unique_id": "test.test.disabled_just_my", "fqn": ["test", "disabled_just_my"], "alias": "disabled_just_my", "checksum": {"name": "sha256", "checksum": "4f2268fd89a3b4ef899264ada6d7aa33603671cbc5d5acead7dc2eadf1add985"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"enabled": false}, "created_at": 1714648415.6666071, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.test.disabled_check_nothing_my_model_.f2c6a72d37": [{"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state_dbt_test__audit", "name": "disabled_check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "disabled_check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.disabled_check_nothing_my_model_.f2c6a72d37", "fqn": ["test", "disabled_check_nothing_my_model_"], "alias": "disabled_check_nothing_my_model_", 
"checksum": {"name": "none", "checksum": ""}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"enabled": false}, "created_at": 1714648415.9626381, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ test_disabled_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_disabled_check_nothing", "macro.dbt.get_where_subquery"], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model", "test_metadata": {"name": "disabled_check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}}], "exposure.test.disabled_exposure": [{"name": "disabled_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.disabled_exposure", "fqn": ["test", "disabled_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "url": null, "depends_on": {"macros": [], "nodes": []}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "created_at": 1714648416.0555809}], 
"metric.test.disabled_metric": [{"name": "disabled_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.disabled_metric", "fqn": ["test", "disabled_metric"], "description": "", "label": "Count records", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": false, "group": null, "meta": {}}, "unrendered_config": {"enabled": false}, "sources": [], "depends_on": {"macros": [], "nodes": []}, "refs": [], "metrics": [], "created_at": 1714648416.110999, "group": null}], "seed.test.disabled_seed": [{"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state", "name": "disabled_seed", "resource_type": "seed", "package_name": "test", "path": "disabled_seed.csv", "original_file_path": "seeds/disabled_seed.csv", "unique_id": "seed.test.disabled_seed", "fqn": ["test", "disabled_seed"], "alias": "disabled_seed", "checksum": {"name": "sha256", "checksum": "31fddd8ec40c6aba6a3a8e7d83fedea2fd0a56c47b64ea3df1847ec1b018e2d1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", 
"quote_columns": null}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "unrendered_config": {"enabled": false}, "created_at": 1714648415.984149, "config_call_dict": {}, "relation_name": "\"dbt\".\"test17146484148326086409_test_previous_version_state\".\"disabled_seed\"", "raw_code": "", "root_path": "/private/var/folders/7h/hj5_fw9j291c58hwfdvy5xbm0000gp/T/pytest-of-jerco/pytest-50/project0", "depends_on": {"macros": []}, "defer_relation": null}], "source.test.my_source.disabled_table": [{"database": "dbt", "schema": "my_source", "name": "disabled_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.disabled_table", "fqn": ["test", "my_source", "disabled_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "disabled_table", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "Disabled table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": false}, "patch_path": null, "unrendered_config": {"enabled": false}, "relation_name": "\"dbt\".\"my_source\".\"disabled_table\"", "created_at": 1714648416.1610181}]}, "parent_map": {"model.test.my_model": [], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": ["seed.test.my_seed"], "analysis.test.a": [], "test.test.just_my": ["model.test.my_model"], "seed.test.my_seed": [], "test.test.not_null_my_model_id.43e0e9183a": ["model.test.my_model"], "test.test.check_nothing_my_model_.d5a5e66110": ["model.test.my_model"], "source.test.my_source.my_table": [], 
"exposure.test.simple_exposure": ["model.test.my_model", "source.test.my_source.my_table"], "metric.test.blue_customers_post_2010": ["semantic_model.test.semantic_people"], "metric.test.customers": ["semantic_model.test.semantic_people"], "metric.test.ratio_of_blue_customers_to_red_customers": ["metric.test.customers"], "metric.test.doubled_blue_customers": ["metric.test.customers"], "semantic_model.test.semantic_people": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["exposure.test.simple_exposure", "semantic_model.test.semantic_people", "test.test.check_nothing_my_model_.d5a5e66110", "test.test.just_my", "test.test.not_null_my_model_id.43e0e9183a"], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": [], "analysis.test.a": [], "test.test.just_my": [], "seed.test.my_seed": ["snapshot.test.snapshot_seed"], "test.test.not_null_my_model_id.43e0e9183a": [], "test.test.check_nothing_my_model_.d5a5e66110": [], "source.test.my_source.my_table": ["exposure.test.simple_exposure"], "exposure.test.simple_exposure": [], "metric.test.blue_customers_post_2010": [], "metric.test.customers": ["metric.test.doubled_blue_customers", "metric.test.ratio_of_blue_customers_to_red_customers"], "metric.test.ratio_of_blue_customers_to_red_customers": [], "metric.test.doubled_blue_customers": [], "semantic_model.test.semantic_people": ["metric.test.blue_customers_post_2010", "metric.test.customers"]}, "group_map": {}, "saved_queries": {}, "semantic_models": {"semantic_model.test.semantic_people": {"name": "semantic_people", "resource_type": "semantic_model", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "semantic_model.test.semantic_people", "fqn": ["test", "semantic_people"], "model": "ref('my_model')", "node_relation": {"alias": "my_model", "schema_name": "test17146484148326086409_test_previous_version_state", "database": "dbt", "relation_name": 
"\"dbt\".\"test17146484148326086409_test_previous_version_state\".\"my_model\""}, "description": null, "label": null, "defaults": {"agg_time_dimension": "created_at"}, "entities": [{"name": "id", "type": "primary", "description": null, "label": null, "role": null, "expr": null}], "measures": [{"name": "years_tenure", "agg": "sum", "description": null, "label": null, "create_metric": false, "expr": "tenure", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "people", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "customers", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}], "dimensions": [{"name": "favorite_color", "type": "categorical", "description": null, "label": null, "is_partition": false, "type_params": null, "expr": null, "metadata": null}, {"name": "created_at", "type": "time", "description": null, "label": null, "is_partition": false, "type_params": {"time_granularity": "day", "validity_params": null}, "expr": null, "metadata": null}], "metadata": null, "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "created_at": 1714648416.155906, "config": {"enabled": true, "group": null, "meta": {}}, "unrendered_config": {}, "primary_entity": null, "group": null}}, "unit_tests": {}} diff --git a/tests/functional/artifacts/expected_manifest.py b/tests/functional/artifacts/expected_manifest.py index 69577b65a6b..68e7799182b 100644 --- a/tests/functional/artifacts/expected_manifest.py +++ b/tests/functional/artifacts/expected_manifest.py @@ -1,7 +1,8 @@ import hashlib -import dbt import os from unittest.mock import ANY + +import dbt from dbt.tests.util import AnyStringWith # This produces an "expected 
manifest", with a number of the fields @@ -279,9 +280,9 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "group": None, "schema": my_schema_name, "database": model_database, - "deferred": False, "alias": "model", "description": "The test model", + "primary_key": ["id"], "columns": { "id": { "name": "id", @@ -372,9 +373,9 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "group": None, "schema": alternate_schema, "database": project.database, - "deferred": False, "alias": "second_model", "description": "The second test model", + "primary_key": [], "columns": { "id": { "name": "id", @@ -457,7 +458,6 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "schema": my_schema_name, "database": project.database, "alias": "seed", - "deferred": False, "description": "The test seed", "columns": { "id": { @@ -530,7 +530,6 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.test.model"], }, - "deferred": False, "description": "", "file_key_name": "models.model", "fqn": ["test", "not_null_model_id"], @@ -580,7 +579,6 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "contract": {"checksum": None, "enforced": False, "alias_types": True}, "database": project.database, "group": None, - "deferred": False, "depends_on": { "macros": [], "nodes": ["seed.test.seed"], @@ -632,7 +630,6 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "macros": ["macro.test.test_nothing", "macro.dbt.get_where_subquery"], "nodes": ["model.test.model"], }, - "deferred": False, "description": "", "file_key_name": "models.model", "fqn": ["test", "test_nothing_model_"], @@ -685,7 +682,6 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], 
"nodes": ["model.test.model"], }, - "deferred": False, "description": "", "file_key_name": "models.model", "fqn": ["test", "unique_model_id"], @@ -929,8 +925,8 @@ def expected_references_manifest(project): "nodes": ["source.test.my_source.my_table"], }, "deprecation_date": None, - "deferred": False, "description": "", + "primary_key": [], "docs": {"node_color": None, "show": True}, "fqn": ["test", "ephemeral_copy"], "group": None, @@ -995,8 +991,8 @@ def expected_references_manifest(project): "nodes": ["model.test.ephemeral_copy"], }, "deprecation_date": None, - "deferred": False, "description": "A summmary table of the ephemeral copy of the seed data", + "primary_key": [], "docs": {"node_color": None, "show": True}, "fqn": ["test", "ephemeral_summary"], "group": "test_group", @@ -1064,8 +1060,8 @@ def expected_references_manifest(project): "nodes": ["model.test.ephemeral_summary"], }, "deprecation_date": None, - "deferred": False, "description": "A view of the summary of the ephemeral copy of the seed data", + "primary_key": [], "docs": {"node_color": None, "show": True}, "fqn": ["test", "view_summary"], "group": None, @@ -1148,7 +1144,6 @@ def expected_references_manifest(project): }, }, "config": get_rendered_seed_config(), - "deferred": False, "depends_on": {"macros": []}, "description": "The test seed", "docs": {"node_color": None, "show": True}, @@ -1183,7 +1178,6 @@ def expected_references_manifest(project): "config": get_rendered_snapshot_config(target_schema=alternate_schema), "contract": {"checksum": None, "enforced": False, "alias_types": True}, "database": model_database, - "deferred": False, "depends_on": {"macros": [], "nodes": ["seed.test.seed"]}, "description": "", "docs": {"node_color": None, "show": True}, @@ -1512,8 +1506,8 @@ def expected_versions_manifest(project): "constraints": [], "sources": [], "depends_on": {"macros": [], "nodes": []}, - "deferred": False, "description": "A versioned model", + "primary_key": ["count", "first_name"], 
"deprecation_date": ANY, "docs": {"node_color": None, "show": True}, "fqn": ["test", "versioned_model", "v1"], @@ -1583,8 +1577,8 @@ def expected_versions_manifest(project): "contract": {"checksum": None, "enforced": False, "alias_types": True}, "sources": [], "depends_on": {"macros": [], "nodes": []}, - "deferred": False, "description": "A versioned model", + "primary_key": ["first_name"], "deprecation_date": None, "docs": {"node_color": None, "show": True}, "fqn": ["test", "versioned_model", "v2"], @@ -1637,8 +1631,8 @@ def expected_versions_manifest(project): ], }, "deprecation_date": None, - "deferred": False, "description": "", + "primary_key": [], "docs": {"node_color": None, "show": True}, "fqn": ["test", "ref_versioned_model"], "group": None, @@ -1694,7 +1688,6 @@ def expected_versions_manifest(project): "macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.test.versioned_model.v1"], }, - "deferred": False, "description": "", "file_key_name": "models.versioned_model", "fqn": ["test", "unique_versioned_model_v1_first_name"], @@ -1748,7 +1741,6 @@ def expected_versions_manifest(project): "macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.test.versioned_model.v1"], }, - "deferred": False, "description": "", "file_key_name": "models.versioned_model", "fqn": ["test", "unique_versioned_model_v1_count"], @@ -1802,7 +1794,6 @@ def expected_versions_manifest(project): "macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.test.versioned_model.v2"], }, - "deferred": False, "description": "", "file_key_name": "models.versioned_model", "fqn": ["test", "unique_versioned_model_v2_first_name"], diff --git a/tests/functional/artifacts/expected_run_results.py b/tests/functional/artifacts/expected_run_results.py index 889dcf6353d..3a3148eba4d 100644 --- a/tests/functional/artifacts/expected_run_results.py +++ b/tests/functional/artifacts/expected_run_results.py @@ -1,4 +1,5 @@ 
from unittest.mock import ANY + from dbt.tests.util import AnyFloat diff --git a/tests/functional/artifacts/test_artifact_fields.py b/tests/functional/artifacts/test_artifact_fields.py index fc8e60330cd..74b121b7622 100644 --- a/tests/functional/artifacts/test_artifact_fields.py +++ b/tests/functional/artifacts/test_artifact_fields.py @@ -1,5 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, get_manifest, get_artifact + +from dbt.tests.util import get_artifact, get_manifest, run_dbt # This is a place to put specific tests for contents of artifacts that we # don't want to bother putting in the big artifact output test, which is diff --git a/tests/functional/artifacts/test_artifacts.py b/tests/functional/artifacts/test_artifacts.py index 7aadb308513..4126266b129 100644 --- a/tests/functional/artifacts/test_artifacts.py +++ b/tests/functional/artifacts/test_artifacts.py @@ -1,25 +1,30 @@ -import pytest import os from datetime import datetime -import dbt + import jsonschema +import pytest -from dbt.tests.util import run_dbt, get_artifact, check_datetime_between, run_dbt_and_capture +import dbt +from dbt.artifacts.schemas.results import RunStatus +from dbt.artifacts.schemas.run import RunResultsArtifact +from dbt.contracts.graph.manifest import WritableManifest +from dbt.tests.util import ( + check_datetime_between, + get_artifact, + run_dbt, + run_dbt_and_capture, +) from tests.functional.artifacts.expected_manifest import ( - expected_seeded_manifest, expected_references_manifest, + expected_seeded_manifest, expected_versions_manifest, ) from tests.functional.artifacts.expected_run_results import ( - expected_run_results, expected_references_run_results, + expected_run_results, expected_versions_run_results, ) -from dbt.contracts.graph.manifest import WritableManifest -from dbt.artifacts.schemas.results import RunStatus -from dbt.artifacts.schemas.run import RunResultsArtifact - models__schema_yml = """ version: 2 diff --git 
a/tests/functional/artifacts/test_docs_generate_defer.py b/tests/functional/artifacts/test_docs_generate_defer.py index dc5eef6e030..cbeff63558d 100644 --- a/tests/functional/artifacts/test_docs_generate_defer.py +++ b/tests/functional/artifacts/test_docs_generate_defer.py @@ -1,6 +1,8 @@ import os import shutil + import pytest + from dbt.tests.util import run_dbt model_sql = """ diff --git a/tests/functional/artifacts/test_override.py b/tests/functional/artifacts/test_override.py index 1d4f32030bd..11f4ec200e1 100644 --- a/tests/functional/artifacts/test_override.py +++ b/tests/functional/artifacts/test_override.py @@ -1,6 +1,7 @@ import pytest -from dbt.tests.util import run_dbt + from dbt.exceptions import CompilationError +from dbt.tests.util import run_dbt model_sql = """ select 1 as id diff --git a/tests/functional/artifacts/test_previous_version_state.py b/tests/functional/artifacts/test_previous_version_state.py index a8511de5a2b..449c0004566 100644 --- a/tests/functional/artifacts/test_previous_version_state.py +++ b/tests/functional/artifacts/test_previous_version_state.py @@ -4,11 +4,11 @@ import pytest +from dbt.artifacts.exceptions import IncompatibleSchemaError from dbt.artifacts.schemas.base import get_artifact_schema_version -from dbt.contracts.graph.manifest import WritableManifest from dbt.artifacts.schemas.run import RunResultsArtifact -from dbt.artifacts.exceptions import IncompatibleSchemaError -from dbt.tests.util import run_dbt, get_manifest +from dbt.contracts.graph.manifest import WritableManifest +from dbt.tests.util import get_manifest, run_dbt # This project must have one of each kind of node type, plus disabled versions, for # test coverage to be complete. 
diff --git a/tests/functional/artifacts/test_run_results.py b/tests/functional/artifacts/test_run_results.py index 7f136afdb60..dea947f342b 100644 --- a/tests/functional/artifacts/test_run_results.py +++ b/tests/functional/artifacts/test_run_results.py @@ -1,7 +1,9 @@ +import json from multiprocessing import Process from pathlib import Path -import json + import pytest + from dbt.tests.util import run_dbt good_model_sql = """ @@ -40,6 +42,22 @@ def test_timing_exists(self, project): assert len(results.results[0].timing) > 0 +class TestRunResultsSerializableInContext: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": good_model_sql} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "on-run-end": ["{% for result in results %}{{ log(result.to_dict()) }}{% endfor %}"] + } + + def test_results_serializable(self, project): + results = run_dbt(["run"]) + assert len(results.results) == 1 + + # This test is failing due to the faulty assumptions that run_results.json would # be written multiple times. Temporarily disabling. 
@pytest.mark.skip() diff --git a/tests/functional/assertions/test_runner.py b/tests/functional/assertions/test_runner.py index 160a0f3c5c9..01ebc87339e 100644 --- a/tests/functional/assertions/test_runner.py +++ b/tests/functional/assertions/test_runner.py @@ -3,8 +3,8 @@ from dbt.cli.main import dbtRunner, dbtRunnerResult from dbt.contracts.graph.manifest import Manifest -from dbt_common.events.base_types import EventMsg from dbt.tests.util import get_run_results +from dbt_common.events.base_types import EventMsg def assert_run_results_have_compiled_node_attributes( diff --git a/tests/functional/basic/test_basic.py b/tests/functional/basic/test_basic.py index 836df78f83f..115f5ab206d 100644 --- a/tests/functional/basic/test_basic.py +++ b/tests/functional/basic/test_basic.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, get_manifest +from dbt.tests.util import get_manifest, run_dbt my_model_sql = """ select 1 as fun diff --git a/tests/functional/basic/test_invalid_reference.py b/tests/functional/basic/test_invalid_reference.py index 1c54d1b906a..9452573c11a 100644 --- a/tests/functional/basic/test_invalid_reference.py +++ b/tests/functional/basic/test_invalid_reference.py @@ -1,7 +1,7 @@ import pytest -from dbt.tests.util import run_dbt -from dbt.exceptions import CompilationError +from dbt.exceptions import CompilationError +from dbt.tests.util import run_dbt descendant_sql = """ -- should be ref('model') diff --git a/tests/functional/basic/test_jaffle_shop.py b/tests/functional/basic/test_jaffle_shop.py index c4ac406d462..c74cbf28298 100644 --- a/tests/functional/basic/test_jaffle_shop.py +++ b/tests/functional/basic/test_jaffle_shop.py @@ -1,6 +1,4 @@ -from dbt.tests.util import run_dbt, get_manifest, run_dbt_and_capture, write_file - - +from dbt.tests.util import get_manifest, run_dbt, run_dbt_and_capture, write_file from tests.fixtures.jaffle_shop import JaffleShopProject diff --git a/tests/functional/basic/test_mixed_case_db.py 
b/tests/functional/basic/test_mixed_case_db.py index 13519cc4bb4..8c9cf44fdd5 100644 --- a/tests/functional/basic/test_mixed_case_db.py +++ b/tests/functional/basic/test_mixed_case_db.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, get_manifest +from dbt.tests.util import get_manifest, run_dbt model_sql = """ select 1 as id diff --git a/tests/functional/basic/test_project.py b/tests/functional/basic/test_project.py index 6602c5e300f..2bdb101c913 100644 --- a/tests/functional/basic/test_project.py +++ b/tests/functional/basic/test_project.py @@ -1,10 +1,11 @@ import os +from pathlib import Path + import pytest import yaml -from pathlib import Path -from dbt.tests.util import run_dbt, update_config_file, write_config_file -from dbt.exceptions import ProjectContractError +from dbt.exceptions import ProjectContractError +from dbt.tests.util import run_dbt, update_config_file, write_config_file simple_model_sql = """ select true as my_column diff --git a/tests/functional/basic/test_simple_reference.py b/tests/functional/basic/test_simple_reference.py index 680a81383c5..22ba540bee6 100644 --- a/tests/functional/basic/test_simple_reference.py +++ b/tests/functional/basic/test_simple_reference.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, copy_file, read_file, check_relations_equal +from dbt.tests.util import check_relations_equal, copy_file, read_file, run_dbt ephemeral_copy_sql = """ {{ diff --git a/tests/functional/basic/test_varchar_widening.py b/tests/functional/basic/test_varchar_widening.py index 3e74629adea..76b62b0ef85 100644 --- a/tests/functional/basic/test_varchar_widening.py +++ b/tests/functional/basic/test_varchar_widening.py @@ -1,7 +1,8 @@ -import pytest import os -from dbt.tests.util import run_dbt, check_relations_equal +import pytest + +from dbt.tests.util import check_relations_equal, run_dbt incremental_sql = """ {{ diff --git a/tests/functional/build_command/test_build.py 
b/tests/functional/build_command/test_build.py index 01d516213b6..f0464f75dd8 100644 --- a/tests/functional/build_command/test_build.py +++ b/tests/functional/build_command/test_build.py @@ -2,28 +2,28 @@ from dbt.tests.util import run_dbt from tests.functional.build_command.fixtures import ( - seeds__country_csv, - snapshots__snap_0, - snapshots__snap_1, - snapshots__snap_99, - models__test_yml, models__model_0_sql, models__model_1_sql, models__model_2_sql, models__model_3_sql, models__model_99_sql, - models_failing__model_1_sql, + models__test_yml, models_circular_relationship__test_yml, + models_failing__model_1_sql, models_failing_tests__tests_yml, + models_interdependent__model_a_sql, + models_interdependent__model_b_null_sql, + models_interdependent__model_b_sql, + models_interdependent__model_c_sql, + models_interdependent__test_yml, models_simple_blocking__model_a_sql, models_simple_blocking__model_b_sql, models_simple_blocking__test_yml, models_triple_blocking__test_yml, - models_interdependent__test_yml, - models_interdependent__model_a_sql, - models_interdependent__model_b_sql, - models_interdependent__model_b_null_sql, - models_interdependent__model_c_sql, + seeds__country_csv, + snapshots__snap_0, + snapshots__snap_1, + snapshots__snap_99, unit_tests__yml, ) diff --git a/tests/functional/cli/test_cli_exit_codes.py b/tests/functional/cli/test_cli_exit_codes.py index 71c1097ba6a..2853a5d5c3c 100644 --- a/tests/functional/cli/test_cli_exit_codes.py +++ b/tests/functional/cli/test_cli_exit_codes.py @@ -3,7 +3,6 @@ from dbt.cli.exceptions import ResultExit from dbt.cli.main import cli - good_sql = """ select 1 as fun """ diff --git a/tests/functional/cli/test_env_var_deprecations.py b/tests/functional/cli/test_env_var_deprecations.py index 6880cc6890d..42a5afab88e 100644 --- a/tests/functional/cli/test_env_var_deprecations.py +++ b/tests/functional/cli/test_env_var_deprecations.py @@ -1,8 +1,8 @@ -import pytest import os -from dbt.tests.util import 
read_file, run_dbt +import pytest +from dbt.tests.util import read_file, run_dbt model_one_sql = """ select 1 as fun diff --git a/tests/functional/cli/test_error_handling.py b/tests/functional/cli/test_error_handling.py index 83c8a6fc47c..1eab78a0418 100644 --- a/tests/functional/cli/test_error_handling.py +++ b/tests/functional/cli/test_error_handling.py @@ -2,7 +2,6 @@ from dbt.tests.util import run_dbt - model_one_sql = """ someting bad """ diff --git a/tests/functional/cli/test_multioption.py b/tests/functional/cli/test_multioption.py index e9013fdb658..59b233c5a98 100644 --- a/tests/functional/cli/test_multioption.py +++ b/tests/functional/cli/test_multioption.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt +from dbt.tests.util import run_dbt model_one_sql = """ select 1 as fun diff --git a/tests/functional/cli/test_resolvers.py b/tests/functional/cli/test_resolvers.py index 2ce8e17ceba..e25b6651aa5 100644 --- a/tests/functional/cli/test_resolvers.py +++ b/tests/functional/cli/test_resolvers.py @@ -1,6 +1,8 @@ +from pathlib import Path + import pytest + from dbt.cli.resolvers import default_log_path -from pathlib import Path class TestDefaultLogPathNoProject: diff --git a/tests/functional/colors/test_colors.py b/tests/functional/colors/test_colors.py index f42591c2b6a..3f731108d18 100644 --- a/tests/functional/colors/test_colors.py +++ b/tests/functional/colors/test_colors.py @@ -1,7 +1,8 @@ -import pytest import re -from dbt.tests.util import run_dbt_and_capture +import pytest + +from dbt.tests.util import run_dbt_and_capture models__do_nothing_then_fail_sql = """ select 1, diff --git a/tests/functional/compile/test_compile.py b/tests/functional/compile/test_compile.py index f7861ca24e2..e7732b09a8c 100644 --- a/tests/functional/compile/test_compile.py +++ b/tests/functional/compile/test_compile.py @@ -1,21 +1,23 @@ import json import pathlib -import pytest import re -from dbt_common.exceptions import DbtRuntimeError, DbtBaseException as 
DbtException -from dbt.tests.util import run_dbt, run_dbt_and_capture, read_file +import pytest + +from dbt.tests.util import read_file, run_dbt, run_dbt_and_capture +from dbt_common.exceptions import DbtBaseException as DbtException +from dbt_common.exceptions import DbtRuntimeError +from tests.functional.assertions.test_runner import dbtTestRunner from tests.functional.compile.fixtures import ( - first_model_sql, - second_model_sql, first_ephemeral_model_sql, + first_model_sql, + model_multiline_jinja, + schema_yml, second_ephemeral_model_sql, + second_model_sql, third_ephemeral_model_sql, with_recursive_model_sql, - schema_yml, - model_multiline_jinja, ) -from tests.functional.assertions.test_runner import dbtTestRunner def norm_whitespace(string): diff --git a/tests/functional/configs/test_configs.py b/tests/functional/configs/test_configs.py index 7ac6259ac13..8d520f1ff80 100644 --- a/tests/functional/configs/test_configs.py +++ b/tests/functional/configs/test_configs.py @@ -1,9 +1,15 @@ -from dbt_common.dataclass_schema import ValidationError -import pytest import os +import pytest + from dbt.exceptions import ParsingError -from dbt.tests.util import run_dbt, update_config_file, write_file, check_relations_equal +from dbt.tests.util import ( + check_relations_equal, + run_dbt, + update_config_file, + write_file, +) +from dbt_common.dataclass_schema import ValidationError from tests.functional.configs.fixtures import BaseConfigProject, simple_snapshot diff --git a/tests/functional/configs/test_configs_in_schema_files.py b/tests/functional/configs/test_configs_in_schema_files.py index c32eb89a3e0..2c4e2ca8119 100644 --- a/tests/functional/configs/test_configs_in_schema_files.py +++ b/tests/functional/configs/test_configs_in_schema_files.py @@ -1,8 +1,7 @@ import pytest -from dbt.tests.util import run_dbt, get_manifest, check_relations_equal, write_file - from dbt.exceptions import CompilationError, ParsingError +from dbt.tests.util import check_relations_equal, 
get_manifest, run_dbt, write_file models_alt__schema_yml = """ version: 2 diff --git a/tests/functional/configs/test_contract_configs.py b/tests/functional/configs/test_contract_configs.py index 10a3e778904..abd4b0caadc 100644 --- a/tests/functional/configs/test_contract_configs.py +++ b/tests/functional/configs/test_contract_configs.py @@ -1,7 +1,15 @@ -import pytest import os + +import pytest + from dbt.exceptions import ParsingError, ValidationError -from dbt.tests.util import run_dbt, get_manifest, get_artifact, run_dbt_and_capture, write_file +from dbt.tests.util import ( + get_artifact, + get_manifest, + run_dbt, + run_dbt_and_capture, + write_file, +) my_model_sql = """ {{ diff --git a/tests/functional/configs/test_custom_node_colors_configs.py b/tests/functional/configs/test_custom_node_colors_configs.py index f1ac98a76a9..7772e3d44ca 100644 --- a/tests/functional/configs/test_custom_node_colors_configs.py +++ b/tests/functional/configs/test_custom_node_colors_configs.py @@ -1,7 +1,7 @@ import pytest -from dbt.tests.util import run_dbt, get_manifest - +from dbt.exceptions import ConfigUpdateError +from dbt.tests.util import get_manifest, run_dbt from dbt_common.dataclass_schema import ValidationError CUSTOM_NODE_COLOR_MODEL_LEVEL = "red" @@ -305,7 +305,7 @@ def test__invalid_color_config_block( self, project, ): - with pytest.raises(ValidationError): + with pytest.raises((ValidationError, ConfigUpdateError)): run_dbt(["compile"]) diff --git a/tests/functional/configs/test_disabled_configs.py b/tests/functional/configs/test_disabled_configs.py index 8fc738977c6..d2ee83e801a 100644 --- a/tests/functional/configs/test_disabled_configs.py +++ b/tests/functional/configs/test_disabled_configs.py @@ -1,7 +1,6 @@ import pytest from dbt.tests.util import run_dbt - from tests.functional.configs.fixtures import BaseConfigProject diff --git a/tests/functional/configs/test_disabled_model.py b/tests/functional/configs/test_disabled_model.py index df9886e8067..23cf8fde1e0 
100644 --- a/tests/functional/configs/test_disabled_model.py +++ b/tests/functional/configs/test_disabled_model.py @@ -1,22 +1,21 @@ import pytest -from dbt_common.dataclass_schema import ValidationError -from dbt.tests.util import run_dbt, get_manifest from dbt.exceptions import CompilationError, ParsingError - +from dbt.tests.util import get_manifest, run_dbt +from dbt_common.dataclass_schema import ValidationError from tests.functional.configs.fixtures import ( - schema_all_disabled_yml, - schema_partial_enabled_yml, - schema_partial_disabled_yml, - schema_explicit_enabled_yml, - schema_invalid_enabled_yml, my_model, my_model_2, - my_model_2_enabled, my_model_2_disabled, + my_model_2_enabled, my_model_3, my_model_3_disabled, my_model_3_enabled, + schema_all_disabled_yml, + schema_explicit_enabled_yml, + schema_invalid_enabled_yml, + schema_partial_disabled_yml, + schema_partial_enabled_yml, ) diff --git a/tests/functional/configs/test_dupe_paths.py b/tests/functional/configs/test_dupe_paths.py index 95b76f5858f..7a12385a526 100644 --- a/tests/functional/configs/test_dupe_paths.py +++ b/tests/functional/configs/test_dupe_paths.py @@ -2,7 +2,6 @@ from dbt.tests.util import run_dbt - my_model_sql = """ select 1 as fun """ diff --git a/tests/functional/configs/test_get_default.py b/tests/functional/configs/test_get_default.py index 597e88c6d65..245ce4c3242 100644 --- a/tests/functional/configs/test_get_default.py +++ b/tests/functional/configs/test_get_default.py @@ -2,7 +2,6 @@ from dbt.tests.util import run_dbt - models_get__any_model_sql = """ -- models/any_model.sql select {{ config.get('made_up_nonexistent_key', 'default_value') }} as col_value diff --git a/tests/functional/configs/test_grant_configs.py b/tests/functional/configs/test_grant_configs.py index 64d3f48d4ca..8b1a4e40126 100644 --- a/tests/functional/configs/test_grant_configs.py +++ b/tests/functional/configs/test_grant_configs.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, 
get_manifest, write_file, write_config_file +from dbt.tests.util import get_manifest, run_dbt, write_config_file, write_file dbt_project_yml = """ models: diff --git a/tests/functional/configs/test_indiv_tests.py b/tests/functional/configs/test_indiv_tests.py index d02be0ba2ca..707d7f66320 100644 --- a/tests/functional/configs/test_indiv_tests.py +++ b/tests/functional/configs/test_indiv_tests.py @@ -1,7 +1,6 @@ import pytest from dbt.tests.util import run_dbt - from tests.functional.configs.fixtures import BaseConfigProject diff --git a/tests/functional/configs/test_unused_configs.py b/tests/functional/configs/test_unused_configs.py index e3dfaf8ad58..62b0fc6b3f0 100644 --- a/tests/functional/configs/test_unused_configs.py +++ b/tests/functional/configs/test_unused_configs.py @@ -1,7 +1,7 @@ import pytest -from dbt.tests.util import run_dbt from dbt.exceptions import CompilationError +from dbt.tests.util import run_dbt seeds__seed_csv = """id,value 4,2 diff --git a/tests/functional/configs/test_versioned_model_constraint.py b/tests/functional/configs/test_versioned_model_constraint.py index eb135df5d61..3776585b573 100644 --- a/tests/functional/configs/test_versioned_model_constraint.py +++ b/tests/functional/configs/test_versioned_model_constraint.py @@ -1,7 +1,7 @@ import pytest -from dbt.tests.util import run_dbt, rm_file, write_file, get_manifest -from dbt.exceptions import ParsingError +from dbt.exceptions import ParsingError +from dbt.tests.util import get_manifest, rm_file, run_dbt, write_file schema_yml = """ models: diff --git a/tests/functional/configs/test_warn_error_options.py b/tests/functional/configs/test_warn_error_options.py new file mode 100644 index 00000000000..d8e68fcf11d --- /dev/null +++ b/tests/functional/configs/test_warn_error_options.py @@ -0,0 +1,206 @@ +from typing import Dict, Union + +import pytest + +from dbt.cli.main import dbtRunner, dbtRunnerResult +from dbt.events.types import DeprecatedModel +from dbt.tests.util import 
update_config_file +from dbt_common.events.base_types import EventLevel +from tests.utils import EventCatcher + +ModelsDictSpec = Dict[str, Union[str, "ModelsDictSpec"]] + +my_model_sql = """SELECT 1 AS id, 'cats are cute' AS description""" +schema_yml = """ +version: 2 +models: + - name: my_model + deprecation_date: 2020-01-01 +""" + + +@pytest.fixture(scope="class") +def models() -> ModelsDictSpec: + return {"my_model.sql": my_model_sql, "schema.yml": schema_yml} + + +@pytest.fixture(scope="function") +def catcher() -> EventCatcher: + return EventCatcher(event_to_catch=DeprecatedModel) + + +@pytest.fixture(scope="function") +def runner(catcher: EventCatcher) -> dbtRunner: + return dbtRunner(callbacks=[catcher.catch]) + + +def assert_deprecation_warning(result: dbtRunnerResult, catcher: EventCatcher) -> None: + assert result.success + assert result.exception is None + assert len(catcher.caught_events) == 1 + assert catcher.caught_events[0].info.level == EventLevel.WARN.value + + +def assert_deprecation_error(result: dbtRunnerResult) -> None: + assert not result.success + assert result.exception is not None + assert "Model my_model has passed its deprecation date of" in str(result.exception) + + +class TestWarnErrorOptionsFromCLI: + def test_can_silence(self, project, catcher: EventCatcher, runner: dbtRunner) -> None: + result = runner.invoke(["run"]) + assert_deprecation_warning(result, catcher) + + catcher.flush() + runner.invoke( + ["run", "--warn-error-options", "{'include': 'all', 'silence': ['DeprecatedModel']}"] + ) + assert len(catcher.caught_events) == 0 + + def test_can_raise_warning_to_error( + self, project, catcher: EventCatcher, runner: dbtRunner + ) -> None: + result = runner.invoke(["run"]) + assert_deprecation_warning(result, catcher) + + catcher.flush() + result = runner.invoke(["run", "--warn-error-options", "{'include': ['DeprecatedModel']}"]) + assert_deprecation_error(result) + + catcher.flush() + result = runner.invoke(["run", 
"--warn-error-options", "{'include': 'all'}"]) + assert_deprecation_error(result) + + catcher.flush() + result = runner.invoke(["run", "--warn-error-options", "{'error': ['DeprecatedModel']}"]) + assert_deprecation_error(result) + + catcher.flush() + result = runner.invoke(["run", "--warn-error-options", "{'error': 'all'}"]) + assert_deprecation_error(result) + + def test_can_exclude_specific_event( + self, project, catcher: EventCatcher, runner: dbtRunner + ) -> None: + result = runner.invoke(["run", "--warn-error-options", "{'include': 'all'}"]) + assert_deprecation_error(result) + + catcher.flush() + result = runner.invoke( + ["run", "--warn-error-options", "{'include': 'all', 'exclude': ['DeprecatedModel']}"] + ) + assert_deprecation_warning(result, catcher) + + catcher.flush() + result = runner.invoke( + ["run", "--warn-error-options", "{'include': 'all', 'warn': ['DeprecatedModel']}"] + ) + assert_deprecation_warning(result, catcher) + + def test_cant_set_both_include_and_error(self, project, runner: dbtRunner) -> None: + result = runner.invoke( + ["run", "--warn-error-options", "{'include': 'all', 'error': 'all'}"] + ) + assert not result.success + assert result.exception is not None + assert "Only `error` or `include` can be specified" in str(result.exception) + + def test_cant_set_both_exclude_and_warn(self, project, runner: dbtRunner) -> None: + result = runner.invoke( + [ + "run", + "--warn-error-options", + "{'include': 'all', 'exclude': ['DeprecatedModel'], 'warn': ['DeprecatedModel']}", + ] + ) + assert not result.success + assert result.exception is not None + assert "Only `warn` or `exclude` can be specified" in str(result.exception) + + +class TestWarnErrorOptionsFromProject: + @pytest.fixture(scope="function") + def clear_project_flags(self, project_root) -> None: + flags = {"flags": {}} + update_config_file(flags, project_root, "dbt_project.yml") + + def test_can_silence( + self, project, clear_project_flags, project_root, catcher: EventCatcher, 
runner: dbtRunner + ) -> None: + result = runner.invoke(["run"]) + assert_deprecation_warning(result, catcher) + + silence_options = { + "flags": {"warn_error_options": {"include": "all", "silence": ["DeprecatedModel"]}} + } + update_config_file(silence_options, project_root, "dbt_project.yml") + + catcher.flush() + runner.invoke(["run"]) + assert len(catcher.caught_events) == 0 + + def test_can_raise_warning_to_error( + self, project, clear_project_flags, project_root, catcher: EventCatcher, runner: dbtRunner + ) -> None: + result = runner.invoke(["run"]) + assert_deprecation_warning(result, catcher) + + include_options = {"flags": {"warn_error_options": {"include": ["DeprecatedModel"]}}} + update_config_file(include_options, project_root, "dbt_project.yml") + + catcher.flush() + result = runner.invoke(["run"]) + assert_deprecation_error(result) + + include_options = {"flags": {"warn_error_options": {"include": "all"}}} + update_config_file(include_options, project_root, "dbt_project.yml") + + catcher.flush() + result = runner.invoke(["run"]) + assert_deprecation_error(result) + + def test_can_exclude_specific_event( + self, project, clear_project_flags, project_root, catcher: EventCatcher, runner: dbtRunner + ) -> None: + include_options = {"flags": {"warn_error_options": {"include": "all"}}} + update_config_file(include_options, project_root, "dbt_project.yml") + result = runner.invoke(["run"]) + assert_deprecation_error(result) + + exclude_options = { + "flags": {"warn_error_options": {"include": "all", "exclude": ["DeprecatedModel"]}} + } + update_config_file(exclude_options, project_root, "dbt_project.yml") + + catcher.flush() + result = runner.invoke(["run"]) + assert_deprecation_warning(result, catcher) + + def test_cant_set_both_include_and_error( + self, project, clear_project_flags, project_root, runner: dbtRunner + ) -> None: + exclude_options = {"flags": {"warn_error_options": {"include": "all", "error": "all"}}} + update_config_file(exclude_options, 
project_root, "dbt_project.yml") + result = runner.invoke(["run"]) + assert not result.success + assert result.exception is not None + assert "Only `error` or `include` can be specified" in str(result.exception) + + def test_cant_set_both_exclude_and_warn( + self, project, clear_project_flags, project_root, runner: dbtRunner + ) -> None: + exclude_options = { + "flags": { + "warn_error_options": { + "include": "all", + "exclude": ["DeprecatedModel"], + "warn": ["DeprecatedModel"], + } + } + } + update_config_file(exclude_options, project_root, "dbt_project.yml") + result = runner.invoke(["run"]) + assert not result.success + assert result.exception is not None + assert "Only `warn` or `exclude` can be specified" in str(result.exception) diff --git a/tests/functional/context_methods/test_builtin_functions.py b/tests/functional/context_methods/test_builtin_functions.py index 3acaf25170c..727df1b63bc 100644 --- a/tests/functional/context_methods/test_builtin_functions.py +++ b/tests/functional/context_methods/test_builtin_functions.py @@ -1,9 +1,10 @@ -import pytest import json import os -from dbt.tests.util import run_dbt, run_dbt_and_capture, write_file +import pytest + from dbt.exceptions import CompilationError +from dbt.tests.util import run_dbt, run_dbt_and_capture, write_file macros__validate_set_sql = """ {% macro validate_set() %} diff --git a/tests/functional/context_methods/test_cli_var_override.py b/tests/functional/context_methods/test_cli_var_override.py index 74e89604f01..b171c880451 100644 --- a/tests/functional/context_methods/test_cli_var_override.py +++ b/tests/functional/context_methods/test_cli_var_override.py @@ -2,7 +2,6 @@ from dbt.tests.util import run_dbt - models_override__schema_yml = """ version: 2 models: diff --git a/tests/functional/context_methods/test_cli_vars.py b/tests/functional/context_methods/test_cli_vars.py index 1d72e8c5021..8c563335ed0 100644 --- a/tests/functional/context_methods/test_cli_vars.py +++ 
b/tests/functional/context_methods/test_cli_vars.py @@ -1,18 +1,16 @@ import pytest import yaml -from tests.fixtures.dbt_integration_project import dbt_integration_project # noqa: F401 - +from dbt.exceptions import CompilationError, DbtRuntimeError +from dbt.tests.fixtures.project import write_project_files from dbt.tests.util import ( + get_artifact, + get_logging_events, run_dbt, run_dbt_and_capture, - get_logging_events, - get_artifact, write_config_file, ) -from dbt.tests.fixtures.project import write_project_files -from dbt.exceptions import DbtRuntimeError, CompilationError - +from tests.fixtures.dbt_integration_project import dbt_integration_project # noqa: F401 models_complex__schema_yml = """ version: 2 diff --git a/tests/functional/context_methods/test_custom_env_vars.py b/tests/functional/context_methods/test_custom_env_vars.py index e74a5dcee09..bf93d826fcd 100644 --- a/tests/functional/context_methods/test_custom_env_vars.py +++ b/tests/functional/context_methods/test_custom_env_vars.py @@ -1,7 +1,8 @@ -import pytest import json import os +import pytest + from dbt.tests.util import run_dbt_and_capture diff --git a/tests/functional/context_methods/test_env_vars.py b/tests/functional/context_methods/test_env_vars.py index 33feb3b5de1..30c56551c09 100644 --- a/tests/functional/context_methods/test_env_vars.py +++ b/tests/functional/context_methods/test_env_vars.py @@ -1,9 +1,10 @@ -import pytest import os -from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER -from dbt.tests.util import run_dbt, get_manifest, run_dbt_and_capture +import pytest +from dbt.constants import DEFAULT_ENV_PLACEHOLDER +from dbt.tests.util import get_manifest, run_dbt, run_dbt_and_capture +from dbt_common.constants import SECRET_ENV_PREFIX context_sql = """ @@ -55,13 +56,13 @@ def setup(self): os.environ["DBT_TEST_ENV_VAR"] = "1" os.environ["DBT_TEST_USER"] = "root" os.environ["DBT_TEST_PASS"] = "password" - os.environ[SECRET_ENV_PREFIX + "SECRET"] = 
"secret_variable" + os.environ[SECRET_ENV_PREFIX + "_SECRET"] = "secret_variable" os.environ["DBT_TEST_NOT_SECRET"] = "regular_variable" os.environ["DBT_TEST_IGNORE_DEFAULT"] = "ignored_default" yield del os.environ["DBT_TEST_ENV_VAR"] del os.environ["DBT_TEST_USER"] - del os.environ[SECRET_ENV_PREFIX + "SECRET"] + del os.environ[SECRET_ENV_PREFIX + "_SECRET"] del os.environ["DBT_TEST_NOT_SECRET"] del os.environ["DBT_TEST_IGNORE_DEFAULT"] diff --git a/tests/functional/context_methods/test_secret_env_vars.py b/tests/functional/context_methods/test_secret_env_vars.py index 5319857459f..68ece050ff4 100644 --- a/tests/functional/context_methods/test_secret_env_vars.py +++ b/tests/functional/context_methods/test_secret_env_vars.py @@ -1,11 +1,11 @@ -import pytest import os -from dbt.constants import SECRET_ENV_PREFIX -from dbt.exceptions import ParsingError, DbtInternalError -from tests.functional.context_methods.first_dependency import FirstDependencyProject -from dbt.tests.util import run_dbt, run_dbt_and_capture, read_file +import pytest +from dbt.exceptions import DbtInternalError, ParsingError +from dbt.tests.util import read_file, run_dbt, run_dbt_and_capture +from dbt_common.constants import SECRET_ENV_PREFIX +from tests.functional.context_methods.first_dependency import FirstDependencyProject secret_bad__context_sql = """ @@ -73,15 +73,15 @@ def test_disallow_secret(self, project): class TestAllowSecretProfilePackage(FirstDependencyProject): @pytest.fixture(scope="class", autouse=True) def setup(self): - os.environ[SECRET_ENV_PREFIX + "USER"] = "root" - os.environ[SECRET_ENV_PREFIX + "PASS"] = "password" - os.environ[SECRET_ENV_PREFIX + "PACKAGE"] = "first_dependency" - os.environ[SECRET_ENV_PREFIX + "GIT_TOKEN"] = "abc123" + os.environ[SECRET_ENV_PREFIX + "_USER"] = "root" + os.environ[SECRET_ENV_PREFIX + "_PASS"] = "password" + os.environ[SECRET_ENV_PREFIX + "_PACKAGE"] = "first_dependency" + os.environ[SECRET_ENV_PREFIX + "_GIT_TOKEN"] = "abc123" yield - del 
os.environ[SECRET_ENV_PREFIX + "USER"] - del os.environ[SECRET_ENV_PREFIX + "PASS"] - del os.environ[SECRET_ENV_PREFIX + "PACKAGE"] - del os.environ[SECRET_ENV_PREFIX + "GIT_TOKEN"] + del os.environ[SECRET_ENV_PREFIX + "_USER"] + del os.environ[SECRET_ENV_PREFIX + "_PASS"] + del os.environ[SECRET_ENV_PREFIX + "_PACKAGE"] + del os.environ[SECRET_ENV_PREFIX + "_GIT_TOKEN"] @pytest.fixture(scope="class") def models(self): @@ -137,7 +137,7 @@ def test_allow_secrets(self, project, first_dependency): class TestCloneFailSecretScrubbed: @pytest.fixture(scope="class", autouse=True) def setup(self): - os.environ[SECRET_ENV_PREFIX + "GIT_TOKEN"] = "abc123" + os.environ[SECRET_ENV_PREFIX + "_GIT_TOKEN"] = "abc123" @pytest.fixture(scope="class") def models(self): diff --git a/tests/functional/context_methods/test_var_dependency.py b/tests/functional/context_methods/test_var_dependency.py index 9755c8c9ab8..5822091b3b3 100644 --- a/tests/functional/context_methods/test_var_dependency.py +++ b/tests/functional/context_methods/test_var_dependency.py @@ -1,9 +1,9 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal +from dbt.tests.util import check_relations_equal, run_dbt from tests.functional.context_methods.first_dependency import ( - FirstDependencyProject, FirstDependencyConfigProject, + FirstDependencyProject, ) dependency_seeds__root_model_expected_csv = """first_dep_global,from_root diff --git a/tests/functional/context_methods/test_var_in_generate_name.py b/tests/functional/context_methods/test_var_in_generate_name.py index 2bbba457e58..d4c4e81d29a 100644 --- a/tests/functional/context_methods/test_var_in_generate_name.py +++ b/tests/functional/context_methods/test_var_in_generate_name.py @@ -1,7 +1,7 @@ import pytest -from dbt.tests.util import run_dbt, update_config_file from dbt.exceptions import CompilationError +from dbt.tests.util import run_dbt, update_config_file model_sql = """ select 1 as id diff --git 
a/tests/functional/context_methods/test_yaml_functions.py b/tests/functional/context_methods/test_yaml_functions.py index d07fea670d9..e90da5f7254 100644 --- a/tests/functional/context_methods/test_yaml_functions.py +++ b/tests/functional/context_methods/test_yaml_functions.py @@ -2,7 +2,6 @@ from dbt.tests.util import run_dbt - tests__from_yaml_sql = """ {% set simplest = (fromyaml('a: 1') == {'a': 1}) %} {% set nested_data %} diff --git a/tests/functional/contracts/test_contract_enforcement.py b/tests/functional/contracts/test_contract_enforcement.py index 78eb2aea556..1d069f204d4 100644 --- a/tests/functional/contracts/test_contract_enforcement.py +++ b/tests/functional/contracts/test_contract_enforcement.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, write_file +from dbt.tests.util import run_dbt, write_file my_model_sql = """ select 'some string' as string_column diff --git a/tests/functional/contracts/test_contract_precision.py b/tests/functional/contracts/test_contract_precision.py index ee5ad4bb50c..a4df2c8ace9 100644 --- a/tests/functional/contracts/test_contract_precision.py +++ b/tests/functional/contracts/test_contract_precision.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt_and_capture +from dbt.tests.util import run_dbt_and_capture my_numeric_model_sql = """ select diff --git a/tests/functional/contracts/test_nonstandard_data_type.py b/tests/functional/contracts/test_nonstandard_data_type.py index 4ee559ff4e5..1bcb5e8bb65 100644 --- a/tests/functional/contracts/test_nonstandard_data_type.py +++ b/tests/functional/contracts/test_nonstandard_data_type.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, run_dbt_and_capture +from dbt.tests.util import run_dbt, run_dbt_and_capture my_numeric_model_sql = """ select diff --git a/tests/functional/custom_aliases/test_custom_aliases.py b/tests/functional/custom_aliases/test_custom_aliases.py index 86b44c3b3f0..22ff536c175 100644 --- 
a/tests/functional/custom_aliases/test_custom_aliases.py +++ b/tests/functional/custom_aliases/test_custom_aliases.py @@ -1,12 +1,11 @@ import pytest from dbt.tests.util import run_dbt - from tests.functional.custom_aliases.fixtures import ( + macros_config_sql, + macros_sql, model1_sql, model2_sql, - macros_sql, - macros_config_sql, schema_yml, ) diff --git a/tests/functional/custom_singular_tests/test_custom_singular_tests.py b/tests/functional/custom_singular_tests/test_custom_singular_tests.py index aec0586b873..4dc638f3827 100644 --- a/tests/functional/custom_singular_tests/test_custom_singular_tests.py +++ b/tests/functional/custom_singular_tests/test_custom_singular_tests.py @@ -1,7 +1,7 @@ -import pytest - from pathlib import Path +import pytest + from dbt.tests.util import run_dbt # from `test/integration/009_data_test` diff --git a/tests/functional/dbt_runner/test_dbt_runner.py b/tests/functional/dbt_runner/test_dbt_runner.py index c332490e2d7..80b94b9c73a 100644 --- a/tests/functional/dbt_runner/test_dbt_runner.py +++ b/tests/functional/dbt_runner/test_dbt_runner.py @@ -2,10 +2,10 @@ import pytest +from dbt.adapters.factory import FACTORY, reset_adapters from dbt.cli.exceptions import DbtUsageException from dbt.cli.main import dbtRunner from dbt.exceptions import DbtProjectError -from dbt.adapters.factory import reset_adapters, FACTORY from dbt.tests.util import read_file, write_file from dbt.version import __version__ as dbt_version from dbt_common.events.contextvars import get_node_info diff --git a/tests/functional/defer_state/test_defer_state.py b/tests/functional/defer_state/test_defer_state.py index f60286b5c43..994ece5aa61 100644 --- a/tests/functional/defer_state/test_defer_state.py +++ b/tests/functional/defer_state/test_defer_state.py @@ -1,4 +1,3 @@ -import json import os import shutil from copy import deepcopy @@ -7,21 +6,21 @@ from dbt.contracts.results import RunStatus from dbt.exceptions import DbtRuntimeError -from dbt.tests.util import 
run_dbt, write_file, rm_file +from dbt.tests.util import rm_file, run_dbt, write_file from tests.functional.defer_state.fixtures import ( - seed_csv, - table_model_sql, + changed_ephemeral_model_sql, changed_table_model_sql, - view_model_sql, changed_view_model_sql, ephemeral_model_sql, - changed_ephemeral_model_sql, - schema_yml, exposures_yml, - macros_sql, infinite_macros_sql, + macros_sql, + schema_yml, + seed_csv, snapshot_sql, + table_model_sql, view_model_now_table_sql, + view_model_sql, ) @@ -87,17 +86,14 @@ def copy_state(self, project_root): def run_and_save_state(self, project_root, with_snapshot=False): results = run_dbt(["seed"]) assert len(results) == 1 - assert not any(r.node.deferred for r in results) results = run_dbt(["run"]) assert len(results) == 2 - assert not any(r.node.deferred for r in results) results = run_dbt(["test"]) assert len(results) == 2 if with_snapshot: results = run_dbt(["snapshot"]) assert len(results) == 1 - assert not any(r.node.deferred for r in results) # copy files self.copy_state(project_root) @@ -181,10 +177,6 @@ def test_run_and_defer(self, project, unique_schema, other_schema): assert other_schema not in results[0].node.compiled_code assert unique_schema in results[0].node.compiled_code - with open("target/manifest.json") as fp: - data = json.load(fp) - assert data["nodes"]["seed.test.seed"]["deferred"] - assert len(results) == 1 @@ -238,6 +230,26 @@ def test_run_defer_iff_not_exists(self, project, unique_schema, other_schema): assert len(results) == 2 assert other_schema not in results[0].node.compiled_code + # again with --favor-state, but this time select both the seed and the view + # because the seed is also selected, the view should select from the seed in our schema ('other_schema') + results = run_dbt( + [ + "build", + "--state", + "state", + "--select", + "seed view_model", + "--resource-type", + "seed model", + "--defer", + "--favor-state", + "--target", + "otherschema", + ] + ) + assert len(results) == 2 + 
assert other_schema in results[1].node.compiled_code + class TestDeferStateDeletedUpstream(BaseDeferState): def test_run_defer_deleted_upstream(self, project, unique_schema, other_schema): diff --git a/tests/functional/defer_state/test_group_updates.py b/tests/functional/defer_state/test_group_updates.py index 884909cf649..ab1cf1f2460 100644 --- a/tests/functional/defer_state/test_group_updates.py +++ b/tests/functional/defer_state/test_group_updates.py @@ -2,19 +2,17 @@ import pytest -from dbt.tests.util import run_dbt, write_file, copy_file from dbt.exceptions import ParsingError - - +from dbt.tests.util import copy_file, run_dbt, write_file from tests.functional.defer_state.fixtures import ( - seed_csv, + group_modified_fail_schema_yml, + group_modified_schema_yml, + group_schema_yml, model_1_sql, - modified_model_1_sql, model_2_sql, + modified_model_1_sql, modified_model_2_sql, - group_schema_yml, - group_modified_schema_yml, - group_modified_fail_schema_yml, + seed_csv, ) diff --git a/tests/functional/defer_state/test_modified_state.py b/tests/functional/defer_state/test_modified_state.py index 40f5e1e31d1..bfd5648ae61 100644 --- a/tests/functional/defer_state/test_modified_state.py +++ b/tests/functional/defer_state/test_modified_state.py @@ -5,42 +5,40 @@ import pytest +from dbt.exceptions import CompilationError, ContractBreakingChangeError from dbt.tests.util import ( + get_manifest, run_dbt, + run_dbt_and_capture, update_config_file, write_file, - get_manifest, - run_dbt_and_capture, ) - -from dbt.exceptions import CompilationError, ContractBreakingChangeError - from tests.functional.defer_state.fixtures import ( - seed_csv, - table_model_sql, - view_model_sql, + constraint_schema_yml, + contract_schema_yml, + disabled_contract_schema_yml, ephemeral_model_sql, - schema_yml, exposures_yml, - macros_sql, infinite_macros_sql, - no_contract_schema_yml, - contract_schema_yml, + macros_sql, + metricflow_time_spine_sql, + modified_column_constraint_schema_yml, 
modified_contract_schema_yml, - disabled_contract_schema_yml, - constraint_schema_yml, - versioned_no_contract_schema_yml, + modified_model_constraint_schema_yml, + modified_semantic_model_schema_yml, + no_contract_schema_yml, + schema_yml, + seed_csv, + semantic_model_schema_yml, + table_model_now_incremental_sql, + table_model_now_view_sql, + table_model_sql, versioned_contract_schema_yml, versioned_disabled_contract_schema_yml, versioned_modified_contract_schema_yml, - modified_column_constraint_schema_yml, - modified_model_constraint_schema_yml, - table_model_now_view_sql, - table_model_now_incremental_sql, + versioned_no_contract_schema_yml, view_model_now_table_sql, - metricflow_time_spine_sql, - semantic_model_schema_yml, - modified_semantic_model_schema_yml, + view_model_sql, ) diff --git a/tests/functional/defer_state/test_run_results_state.py b/tests/functional/defer_state/test_run_results_state.py index 69dc77a1dd3..e4b467d8e37 100644 --- a/tests/functional/defer_state/test_run_results_state.py +++ b/tests/functional/defer_state/test_run_results_state.py @@ -4,16 +4,15 @@ import pytest from dbt.tests.util import run_dbt, write_file - from tests.functional.defer_state.fixtures import ( - seed_csv, - table_model_sql, - view_model_sql, ephemeral_model_sql, - schema_yml, exposures_yml, - macros_sql, infinite_macros_sql, + macros_sql, + schema_yml, + seed_csv, + table_model_sql, + view_model_sql, ) diff --git a/tests/functional/dependencies/test_dependency_options.py b/tests/functional/dependencies/test_dependency_options.py index 7e451555ac3..067fd7bf1e5 100644 --- a/tests/functional/dependencies/test_dependency_options.py +++ b/tests/functional/dependencies/test_dependency_options.py @@ -1,5 +1,6 @@ import os import shutil + import pytest from dbt.tests.util import run_dbt diff --git a/tests/functional/dependencies/test_dependency_secrets.py b/tests/functional/dependencies/test_dependency_secrets.py new file mode 100644 index 00000000000..02dc4f0f8f1 --- 
/dev/null +++ b/tests/functional/dependencies/test_dependency_secrets.py @@ -0,0 +1,32 @@ +import os + +import pytest + +from dbt.tests.util import run_dbt_and_capture +from dbt_common.constants import SECRET_ENV_PREFIX + + +class TestSecretInPackage: + @pytest.fixture(scope="class", autouse=True) + def setUp(self): + os.environ[SECRET_ENV_PREFIX + "_FOR_LOGGING"] = "super secret" + yield + del os.environ[SECRET_ENV_PREFIX + "_FOR_LOGGING"] + + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "package": "dbt-labs/dbt_utils{{ log(env_var('DBT_ENV_SECRET_FOR_LOGGING'), info = true) }}", + "version": "1.0.0", + } + ] + } + + def test_mask_secrets(self, project): + _, log_output = run_dbt_and_capture(["deps"]) + # this will not be written to logs + assert not ("super secret" in log_output) + assert "*****" in log_output + assert not ("DBT_ENV_SECRET_FOR_LOGGING" in log_output) diff --git a/tests/functional/dependencies/test_local_dependency.py b/tests/functional/dependencies/test_local_dependency.py index 04391e0c872..28558bc725f 100644 --- a/tests/functional/dependencies/test_local_dependency.py +++ b/tests/functional/dependencies/test_local_dependency.py @@ -1,22 +1,22 @@ -import os -import pytest import json +import os import shutil -import yaml - -# todo: make self.unique_schema to fixture - from pathlib import Path from unittest import mock -import dbt_common.semver as semver -import dbt_common.exceptions +import pytest +import yaml + import dbt.config import dbt.exceptions - +import dbt_common.exceptions +import dbt_common.semver as semver from dbt.tests.util import check_relations_equal, run_dbt, run_dbt_and_capture from tests.functional.utils import up_one +# todo: make self.unique_schema to fixture + + models__dep_source = """ {# If our dependency source didn't exist, this would be an errror #} select * from {{ source('seed_source', 'seed') }} diff --git a/tests/functional/dependencies/test_simple_dependency.py 
b/tests/functional/dependencies/test_simple_dependency.py index de06452ec30..e63a648e0be 100644 --- a/tests/functional/dependencies/test_simple_dependency.py +++ b/tests/functional/dependencies/test_simple_dependency.py @@ -1,16 +1,11 @@ import os -import pytest import tempfile - from pathlib import Path -from dbt.exceptions import DbtProjectError -from dbt.tests.util import ( - check_relations_equal, - run_dbt, - write_config_file, -) +import pytest +from dbt.exceptions import DbtProjectError +from dbt.tests.util import check_relations_equal, run_dbt, write_config_file models__disabled_one = """ {{config(enabled=False)}} @@ -434,3 +429,44 @@ def test_malformed_tarball_package_causes_exception(self, project): ) as e: run_dbt(["deps"]) assert e is not None + + +class TestEmptyDependency: + def test_empty_package(self, project): + # We have to specify the bad formatted package here because if we do it + # in a `packages` fixture, the test will blow up in the setup phase, meaning + # we can't appropriately catch it with a `pytest.raises` + empty_hub_package = { + "packages": [ + { + "package": "", + "version": "1.0.0", + } + ] + } + write_config_file(empty_hub_package, "packages.yml") + with pytest.raises(DbtProjectError, match="A hub package is missing the value"): + run_dbt(["deps"]) + + empty_git_package = { + "packages": [ + { + "git": "", + "revision": "1.0.0", + } + ] + } + write_config_file(empty_git_package, "packages.yml") + with pytest.raises(DbtProjectError, match="A git package is missing the value"): + run_dbt(["deps"]) + + empty_local_package = { + "packages": [ + { + "local": "", + } + ] + } + write_config_file(empty_local_package, "packages.yml") + with pytest.raises(DbtProjectError, match="A local package is missing the value"): + run_dbt(["deps"]) diff --git a/tests/functional/dependencies/test_simple_dependency_with_configs.py b/tests/functional/dependencies/test_simple_dependency_with_configs.py index 86ab911a2b1..40642d4e81a 100644 --- 
a/tests/functional/dependencies/test_simple_dependency_with_configs.py +++ b/tests/functional/dependencies/test_simple_dependency_with_configs.py @@ -1,12 +1,8 @@ -import pytest - from pathlib import Path -from dbt.tests.util import ( - check_relations_equal, - run_dbt, -) +import pytest +from dbt.tests.util import check_relations_equal, run_dbt models__view_summary = """ {{ diff --git a/tests/functional/deprecations/model_deprecations.py b/tests/functional/deprecations/model_deprecations.py index 043b7e1e5e9..03e38b1220e 100644 --- a/tests/functional/deprecations/model_deprecations.py +++ b/tests/functional/deprecations/model_deprecations.py @@ -1,10 +1,9 @@ import pytest -from dbt.exceptions import EventCompilationError from dbt.cli.main import dbtRunner +from dbt.exceptions import EventCompilationError from dbt.tests.util import run_dbt - deprecated_model__yml = """ version: 2 diff --git a/tests/functional/deprecations/test_config_deprecations.py b/tests/functional/deprecations/test_config_deprecations.py index ba2d780c999..d4e965a8451 100644 --- a/tests/functional/deprecations/test_config_deprecations.py +++ b/tests/functional/deprecations/test_config_deprecations.py @@ -2,17 +2,16 @@ from dbt import deprecations from dbt.exceptions import CompilationError, ProjectContractError, YamlParseDictError -from dbt.tests.util import run_dbt, update_config_file from dbt.tests.fixtures.project import write_project_files - +from dbt.tests.util import run_dbt, update_config_file from tests.functional.deprecations.fixtures import ( - macros__custom_test_sql, - models_trivial__model_sql, - old_tests_yml, + data_tests_yaml, local_dependency__dbt_project_yml, local_dependency__schema_yml, local_dependency__seed_csv, - data_tests_yaml, + macros__custom_test_sql, + models_trivial__model_sql, + old_tests_yml, seed_csv, sources_old_tests_yaml, test_type_mixed_yaml, diff --git a/tests/functional/deprecations/test_deprecations.py b/tests/functional/deprecations/test_deprecations.py 
index ba90fec8bb7..8082be7b911 100644 --- a/tests/functional/deprecations/test_deprecations.py +++ b/tests/functional/deprecations/test_deprecations.py @@ -1,13 +1,13 @@ import pytest -import dbt_common +import yaml +import dbt_common from dbt import deprecations +from dbt.tests.util import run_dbt, write_file from tests.functional.deprecations.fixtures import ( - models_trivial__model_sql, bad_name_yaml, + models_trivial__model_sql, ) -from dbt.tests.util import run_dbt, write_file -import yaml class TestConfigPathDeprecation: @@ -120,7 +120,7 @@ def test_exposure_name_fail(self, project): assert expected_msg in exc_str -class TestPrjectFlagsMovedDeprecation: +class TestProjectFlagsMovedDeprecation: @pytest.fixture(scope="class") def profiles_config_update(self): return { diff --git a/tests/functional/docs/test_duplicate_docs_block.py b/tests/functional/docs/test_duplicate_docs_block.py index 89454e7bf55..95262bcb734 100644 --- a/tests/functional/docs/test_duplicate_docs_block.py +++ b/tests/functional/docs/test_duplicate_docs_block.py @@ -1,8 +1,7 @@ import pytest -from dbt.tests.util import run_dbt import dbt_common.exceptions - +from dbt.tests.util import run_dbt duplicate_doc_blocks_model_sql = "select 1 as id, 'joe' as first_name" diff --git a/tests/functional/docs/test_generate.py b/tests/functional/docs/test_generate.py index 7597fdb41f3..8e5873e602d 100644 --- a/tests/functional/docs/test_generate.py +++ b/tests/functional/docs/test_generate.py @@ -1,8 +1,9 @@ -import pytest from unittest import mock -from dbt.plugins.manifest import PluginNodes, ModelNodeArgs -from dbt.tests.util import run_dbt, get_manifest +import pytest + +from dbt.plugins.manifest import ModelNodeArgs, PluginNodes +from dbt.tests.util import get_manifest, run_dbt sample_seed = """sample_num,sample_bool 1,true diff --git a/tests/functional/docs/test_good_docs_blocks.py b/tests/functional/docs/test_good_docs_blocks.py index 782489854f5..768e6201275 100644 --- 
a/tests/functional/docs/test_good_docs_blocks.py +++ b/tests/functional/docs/test_good_docs_blocks.py @@ -1,11 +1,11 @@ import json import os from pathlib import Path + import pytest from dbt.tests.util import run_dbt, update_config_file, write_file - good_docs_blocks_model_sql = "select 1 as id, 'joe' as first_name" good_docs_blocks_docs_md = """{% docs my_model_doc %} diff --git a/tests/functional/docs/test_invalid_doc_ref.py b/tests/functional/docs/test_invalid_doc_ref.py index 6750f987fce..cfcd65da6e0 100644 --- a/tests/functional/docs/test_invalid_doc_ref.py +++ b/tests/functional/docs/test_invalid_doc_ref.py @@ -1,8 +1,7 @@ import pytest -from dbt.tests.util import run_dbt import dbt_common.exceptions - +from dbt.tests.util import run_dbt invalid_doc_ref_model_sql = "select 1 as id, 'joe' as first_name" diff --git a/tests/functional/docs/test_missing_docs_blocks.py b/tests/functional/docs/test_missing_docs_blocks.py index 4565d3cc937..193644eca76 100644 --- a/tests/functional/docs/test_missing_docs_blocks.py +++ b/tests/functional/docs/test_missing_docs_blocks.py @@ -1,8 +1,7 @@ import pytest -from dbt.tests.util import run_dbt import dbt_common.exceptions - +from dbt.tests.util import run_dbt missing_docs_blocks_model_sql = "select 1 as id, 'joe' as first_name" diff --git a/tests/functional/docs/test_static.py b/tests/functional/docs/test_static.py index 9434b281188..05f3a1ef7ca 100644 --- a/tests/functional/docs/test_static.py +++ b/tests/functional/docs/test_static.py @@ -1,9 +1,10 @@ +import os + import pytest -from dbt_common.clients.system import load_file_contents from dbt.task.docs import DOCS_INDEX_FILE_PATH from dbt.tests.util import run_dbt -import os +from dbt_common.clients.system import load_file_contents class TestStaticGenerate: diff --git a/tests/functional/duplicates/test_duplicate_analysis.py b/tests/functional/duplicates/test_duplicate_analysis.py index 44dc4c6f167..126f6ae6907 100644 --- 
a/tests/functional/duplicates/test_duplicate_analysis.py +++ b/tests/functional/duplicates/test_duplicate_analysis.py @@ -3,7 +3,6 @@ from dbt.exceptions import CompilationError from dbt.tests.util import run_dbt - my_model_sql = """ select 1 as id """ diff --git a/tests/functional/duplicates/test_duplicate_exposure.py b/tests/functional/duplicates/test_duplicate_exposure.py index 140db21cd07..349c6b78cc7 100644 --- a/tests/functional/duplicates/test_duplicate_exposure.py +++ b/tests/functional/duplicates/test_duplicate_exposure.py @@ -3,7 +3,6 @@ from dbt.exceptions import CompilationError from dbt.tests.util import run_dbt - exposure_dupes_schema_yml = """ version: 2 exposures: diff --git a/tests/functional/duplicates/test_duplicate_macro.py b/tests/functional/duplicates/test_duplicate_macro.py index 35b843f5891..0c1ba3d76d3 100644 --- a/tests/functional/duplicates/test_duplicate_macro.py +++ b/tests/functional/duplicates/test_duplicate_macro.py @@ -3,7 +3,6 @@ from dbt.exceptions import CompilationError from dbt.tests.util import run_dbt - bad_same_macros_sql = """ {% macro some_macro() %} {% endmacro %} diff --git a/tests/functional/duplicates/test_duplicate_metric.py b/tests/functional/duplicates/test_duplicate_metric.py index a5f6b60e8f3..1b172b09caf 100644 --- a/tests/functional/duplicates/test_duplicate_metric.py +++ b/tests/functional/duplicates/test_duplicate_metric.py @@ -3,7 +3,6 @@ from dbt.exceptions import CompilationError from dbt.tests.util import run_dbt - metric_dupes_schema_yml = """ version: 2 diff --git a/tests/functional/duplicates/test_duplicate_model.py b/tests/functional/duplicates/test_duplicate_model.py index be9e8754d1f..01fad6e949d 100644 --- a/tests/functional/duplicates/test_duplicate_model.py +++ b/tests/functional/duplicates/test_duplicate_model.py @@ -1,9 +1,8 @@ import pytest -from dbt.exceptions import CompilationError, AmbiguousAliasError +from dbt.exceptions import AmbiguousAliasError, CompilationError from 
dbt.tests.fixtures.project import write_project_files -from dbt.tests.util import run_dbt, get_manifest - +from dbt.tests.util import get_manifest, run_dbt disabled_model_sql = """ {{ diff --git a/tests/functional/duplicates/test_duplicate_resource.py b/tests/functional/duplicates/test_duplicate_resource.py index 0bc070ff39f..87c52cf5712 100644 --- a/tests/functional/duplicates/test_duplicate_resource.py +++ b/tests/functional/duplicates/test_duplicate_resource.py @@ -2,7 +2,6 @@ from dbt.tests.util import run_dbt - models_naming_dupes_schema_yml = """ version: 2 models: diff --git a/tests/functional/duplicates/test_duplicate_source.py b/tests/functional/duplicates/test_duplicate_source.py index 1100345aabc..4b9b6c4a08e 100644 --- a/tests/functional/duplicates/test_duplicate_source.py +++ b/tests/functional/duplicates/test_duplicate_source.py @@ -3,7 +3,6 @@ from dbt.exceptions import CompilationError from dbt.tests.util import run_dbt - source_dupes_schema_yml = """ version: 2 sources: diff --git a/tests/functional/exit_codes/test_exit_codes.py b/tests/functional/exit_codes/test_exit_codes.py index 44672beecae..5013d6fc070 100644 --- a/tests/functional/exit_codes/test_exit_codes.py +++ b/tests/functional/exit_codes/test_exit_codes.py @@ -4,10 +4,10 @@ from dbt.tests.util import check_table_does_exist, check_table_does_not_exist, run_dbt from tests.functional.exit_codes.fixtures import ( BaseConfigProject, - snapshots_bad_sql, - snapshots_good_sql, data_seed_bad_csv, data_seed_good_csv, + snapshots_bad_sql, + snapshots_good_sql, ) diff --git a/tests/functional/experimental_parser/test_all_experimental_parser.py b/tests/functional/experimental_parser/test_all_experimental_parser.py index 87e6d2f30e1..ff481cd0c04 100644 --- a/tests/functional/experimental_parser/test_all_experimental_parser.py +++ b/tests/functional/experimental_parser/test_all_experimental_parser.py @@ -1,10 +1,10 @@ -import pytest +import os -from dbt.tests.util import run_dbt +import pytest from 
dbt.artifacts.resources import RefArgs from dbt.contracts.graph.manifest import Manifest -import os +from dbt.tests.util import run_dbt def get_manifest(): diff --git a/tests/functional/exposures/test_exposure_configs.py b/tests/functional/exposures/test_exposure_configs.py index 8f60778e058..2ec309623a7 100644 --- a/tests/functional/exposures/test_exposure_configs.py +++ b/tests/functional/exposures/test_exposure_configs.py @@ -1,20 +1,19 @@ import pytest from dbt.artifacts.resources import ExposureConfig +from dbt.tests.util import get_manifest, run_dbt, update_config_file from dbt_common.dataclass_schema import ValidationError - -from dbt.tests.util import run_dbt, update_config_file, get_manifest from tests.functional.exposures.fixtures import ( - models_sql, - second_model_sql, - simple_exposure_yml, disabled_models_exposure_yml, enabled_yaml_level_exposure_yml, invalid_config_exposure_yml, - source_schema_yml, metricflow_time_spine_sql, - semantic_models_schema_yml, metrics_schema_yml, + models_sql, + second_model_sql, + semantic_models_schema_yml, + simple_exposure_yml, + source_schema_yml, ) diff --git a/tests/functional/exposures/test_exposures.py b/tests/functional/exposures/test_exposures.py index 1988dd976b3..be42ffd26c0 100644 --- a/tests/functional/exposures/test_exposures.py +++ b/tests/functional/exposures/test_exposures.py @@ -1,14 +1,14 @@ import pytest -from dbt.tests.util import run_dbt, get_manifest +from dbt.tests.util import get_manifest, run_dbt from tests.functional.exposures.fixtures import ( + metricflow_time_spine_sql, + metrics_schema_yml, models_sql, second_model_sql, + semantic_models_schema_yml, simple_exposure_yml, source_schema_yml, - metrics_schema_yml, - semantic_models_schema_yml, - metricflow_time_spine_sql, ) diff --git a/tests/functional/external_reference/test_external_reference.py b/tests/functional/external_reference/test_external_reference.py index 8b5294155d8..7ac561ce862 100644 --- 
a/tests/functional/external_reference/test_external_reference.py +++ b/tests/functional/external_reference/test_external_reference.py @@ -2,7 +2,6 @@ from dbt.tests.util import run_dbt - external_model_sql = """ {{ config( diff --git a/tests/functional/graph_selection/fixtures.py b/tests/functional/graph_selection/fixtures.py index 846502fc6f5..b4d4a677d25 100644 --- a/tests/functional/graph_selection/fixtures.py +++ b/tests/functional/graph_selection/fixtures.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import read_file +from dbt.tests.util import read_file schema_yml = """ version: 2 diff --git a/tests/functional/graph_selection/test_graph_selection.py b/tests/functional/graph_selection/test_graph_selection.py index 9000fb73924..36a8203ef4f 100644 --- a/tests/functional/graph_selection/test_graph_selection.py +++ b/tests/functional/graph_selection/test_graph_selection.py @@ -1,11 +1,11 @@ -import os import json +import os + import pytest -from dbt.tests.util import run_dbt, check_result_nodes_by_name +from dbt.tests.util import check_result_nodes_by_name, run_dbt from tests.functional.graph_selection.fixtures import SelectionFixtures - selectors_yml = """ selectors: - name: bi_selector diff --git a/tests/functional/graph_selection/test_group_selection.py b/tests/functional/graph_selection/test_group_selection.py index c14f008b7d9..fe028666956 100644 --- a/tests/functional/graph_selection/test_group_selection.py +++ b/tests/functional/graph_selection/test_group_selection.py @@ -1,22 +1,21 @@ import pytest -from dbt.tests.util import run_dbt, read_file +from dbt.tests.util import read_file, run_dbt from tests.functional.graph_selection.fixtures import ( - schema_yml, + alternative_users_sql, base_users_sql, - users_sql, - users_rollup_sql, - users_rollup_dependency_sql, - emails_sql, emails_alt_sql, - alternative_users_sql, - never_selected_sql, - subdir_sql, + emails_sql, nested_users_sql, + never_selected_sql, properties_yml, + schema_yml, + subdir_sql, 
+ users_rollup_dependency_sql, + users_rollup_sql, + users_sql, ) - selectors_yml = """ selectors: - name: group_specified_as_string_str diff --git a/tests/functional/graph_selection/test_intersection_syntax.py b/tests/functional/graph_selection/test_intersection_syntax.py index 29c53fd405c..35cf78e6d6d 100644 --- a/tests/functional/graph_selection/test_intersection_syntax.py +++ b/tests/functional/graph_selection/test_intersection_syntax.py @@ -1,9 +1,8 @@ import pytest -from dbt.tests.util import run_dbt, check_result_nodes_by_name +from dbt.tests.util import check_result_nodes_by_name, run_dbt from tests.functional.graph_selection.fixtures import SelectionFixtures - selectors_yml = """ selectors: - name: same_intersection diff --git a/tests/functional/graph_selection/test_schema_test_graph_selection.py b/tests/functional/graph_selection/test_schema_test_graph_selection.py index 105397d4112..48ca572f370 100644 --- a/tests/functional/graph_selection/test_schema_test_graph_selection.py +++ b/tests/functional/graph_selection/test_schema_test_graph_selection.py @@ -1,8 +1,7 @@ import pytest -from dbt.tests.util import run_dbt from dbt.tests.fixtures.project import write_project_files - +from dbt.tests.util import run_dbt from tests.fixtures.dbt_integration_project import dbt_integration_project # noqa: F401 from tests.functional.graph_selection.fixtures import SelectionFixtures diff --git a/tests/functional/graph_selection/test_tag_selection.py b/tests/functional/graph_selection/test_tag_selection.py index 7e954ad22db..7561937e18d 100644 --- a/tests/functional/graph_selection/test_tag_selection.py +++ b/tests/functional/graph_selection/test_tag_selection.py @@ -1,9 +1,8 @@ import pytest -from dbt.tests.util import run_dbt, check_result_nodes_by_name +from dbt.tests.util import check_result_nodes_by_name, run_dbt from tests.functional.graph_selection.fixtures import SelectionFixtures - selectors_yml = """ selectors: - name: tag_specified_as_string_str diff --git 
a/tests/functional/graph_selection/test_version_selection.py b/tests/functional/graph_selection/test_version_selection.py index 4f9325a1fb8..335fad25270 100644 --- a/tests/functional/graph_selection/test_version_selection.py +++ b/tests/functional/graph_selection/test_version_selection.py @@ -1,15 +1,14 @@ import pytest -from dbt.tests.util import run_dbt, read_file +from dbt.tests.util import read_file, run_dbt from tests.functional.graph_selection.fixtures import ( - schema_yml, base_users_sql, - users_sql, - users_rollup_sql, properties_yml, + schema_yml, + users_rollup_sql, + users_sql, ) - selectors_yml = """ selectors: - name: version_specified_as_string_str diff --git a/tests/functional/incremental_schema_tests/test_incremental_schema.py b/tests/functional/incremental_schema_tests/test_incremental_schema.py index 8203f497331..28d4ab546bf 100644 --- a/tests/functional/incremental_schema_tests/test_incremental_schema.py +++ b/tests/functional/incremental_schema_tests/test_incremental_schema.py @@ -1,31 +1,27 @@ import pytest -from dbt.tests.util import ( - check_relations_equal, - run_dbt, -) - +from dbt.tests.util import check_relations_equal, run_dbt from tests.functional.incremental_schema_tests.fixtures import ( - _PROPERTIES__SCHEMA, - _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY, + _MODELS__A, + _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS, + _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE, + _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE_TARGET, + _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_TARGET, + _MODELS__INCREMENTAL_FAIL, _MODELS__INCREMENTAL_IGNORE, - _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY_TARGET, _MODELS__INCREMENTAL_IGNORE_TARGET, - _MODELS__INCREMENTAL_FAIL, _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS, - _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE, - _MODELS__A, - _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_TARGET, - _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS, _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS_TARGET, - 
_MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE_TARGET, - _TESTS__SELECT_FROM_INCREMENTAL_IGNORE, + _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY, + _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY_TARGET, + _PROPERTIES__SCHEMA, _TESTS__SELECT_FROM_A, + _TESTS__SELECT_FROM_INCREMENTAL_APPEND_NEW_COLUMNS, _TESTS__SELECT_FROM_INCREMENTAL_APPEND_NEW_COLUMNS_TARGET, + _TESTS__SELECT_FROM_INCREMENTAL_IGNORE, + _TESTS__SELECT_FROM_INCREMENTAL_IGNORE_TARGET, _TESTS__SELECT_FROM_INCREMENTAL_SYNC_ALL_COLUMNS, _TESTS__SELECT_FROM_INCREMENTAL_SYNC_ALL_COLUMNS_TARGET, - _TESTS__SELECT_FROM_INCREMENTAL_IGNORE_TARGET, - _TESTS__SELECT_FROM_INCREMENTAL_APPEND_NEW_COLUMNS, ) diff --git a/tests/functional/init/test_init.py b/tests/functional/init/test_init.py index 6aee523320c..1c477f9bf0c 100644 --- a/tests/functional/init/test_init.py +++ b/tests/functional/init/test_init.py @@ -1,13 +1,13 @@ -import click import os -import yaml -import pytest from pathlib import Path from unittest import mock from unittest.mock import Mock, call -from dbt.exceptions import DbtRuntimeError +import click +import pytest +import yaml +from dbt.exceptions import DbtRuntimeError from dbt.tests.util import run_dbt diff --git a/tests/functional/invalid_model_tests/test_invalid_models.py b/tests/functional/invalid_model_tests/test_invalid_models.py index 09db17bc325..d931b81331a 100644 --- a/tests/functional/invalid_model_tests/test_invalid_models.py +++ b/tests/functional/invalid_model_tests/test_invalid_models.py @@ -1,10 +1,7 @@ import pytest from dbt.exceptions import CompilationError, ParsingError - -from dbt.tests.util import ( - run_dbt, -) +from dbt.tests.util import run_dbt # from `test/integration/011_invalid_model_tests`, invalid_model_tests diff --git a/tests/functional/list/fixtures.py b/tests/functional/list/fixtures.py index 7681e1c5632..48cd5710347 100644 --- a/tests/functional/list/fixtures.py +++ b/tests/functional/list/fixtures.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.fixtures.project import 
write_project_files +from dbt.tests.fixtures.project import write_project_files snapshots__snapshot_sql = """ {% snapshot my_snapshot %} diff --git a/tests/functional/list/test_list.py b/tests/functional/list/test_list.py index 4eb8cb8e2f9..9b51b6798c9 100644 --- a/tests/functional/list/test_list.py +++ b/tests/functional/list/test_list.py @@ -1,21 +1,20 @@ -import pytest -import os import json +import os -from dbt.tests.util import run_dbt -from dbt.logger import log_manager +import pytest +from dbt.tests.util import run_dbt from tests.functional.list.fixtures import ( # noqa: F401 - snapshots, - tests, - models, - macros, - seeds, analyses, - semantic_models, + macros, metrics, - saved_queries, + models, project_files, + saved_queries, + seeds, + semantic_models, + snapshots, + tests, ) @@ -45,14 +44,12 @@ def test_packages_install_path_does_not_exist(self, project): assert not os.path.exists(packages_install_path) def run_dbt_ls(self, args=None, expect_pass=True): - log_manager.stdout_console() full_args = ["ls"] if args is not None: full_args += args result = run_dbt(args=full_args, expect_pass=expect_pass) - log_manager.stdout_console() return result def assert_json_equal(self, json_str, expected): diff --git a/tests/functional/logging/test_logging.py b/tests/functional/logging/test_logging.py index c4799f609d9..9205a8faab4 100644 --- a/tests/functional/logging/test_logging.py +++ b/tests/functional/logging/test_logging.py @@ -1,10 +1,11 @@ -import pytest -from dbt.tests.util import run_dbt, get_manifest, read_file import json import os -from dbt_common.events.functions import fire_event -from dbt.events.types import InvalidOptionYAML +import pytest + +from dbt.events.types import InvalidOptionYAML +from dbt.tests.util import get_manifest, read_file, run_dbt +from dbt_common.events.functions import fire_event my_model_sql = """ select 1 as fun diff --git a/tests/functional/logging/test_meta_logging.py b/tests/functional/logging/test_meta_logging.py index 
aa262730077..19b10725273 100644 --- a/tests/functional/logging/test_meta_logging.py +++ b/tests/functional/logging/test_meta_logging.py @@ -1,7 +1,9 @@ -import pytest -from dbt.tests.util import run_dbt, read_file import json +import pytest + +from dbt.tests.util import read_file, run_dbt + model1 = "select 1 as fun" model2 = '{{ config(meta={"owners": ["team1", "team2"]})}} select 1 as fun' model3 = '{{ config(meta={"key": 1})}} select 1 as fun' diff --git a/tests/functional/macros/test_macros.py b/tests/functional/macros/test_macros.py index 7838e713be9..fb3d808526b 100644 --- a/tests/functional/macros/test_macros.py +++ b/tests/functional/macros/test_macros.py @@ -1,33 +1,28 @@ -import pytest import shutil - -import dbt_common.exceptions - from pathlib import Path -from dbt.tests.util import ( - run_dbt, - check_relations_equal, -) +import pytest +import dbt_common.exceptions from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import check_relations_equal, run_dbt from tests.functional.macros.fixtures import ( dbt_project__incorrect_dispatch, - models__dep_macro, - models__with_undefined_macro, - models__local_macro, - models__ref_macro, - models__override_get_columns_macros, - models__deprecated_adapter_macro_model, - models__incorrect_dispatch, - models__materialization_macro, + macros__deprecated_adapter_macro, + macros__incorrect_dispatch, macros__my_macros, + macros__named_materialization, macros__no_default_macros, macros__override_get_columns_macros, macros__package_override_get_columns_macros, - macros__deprecated_adapter_macro, - macros__incorrect_dispatch, - macros__named_materialization, + models__dep_macro, + models__deprecated_adapter_macro_model, + models__incorrect_dispatch, + models__local_macro, + models__materialization_macro, + models__override_get_columns_macros, + models__ref_macro, + models__with_undefined_macro, ) diff --git a/tests/functional/manifest_validations/test_check_for_spaces_in_model_names.py 
b/tests/functional/manifest_validations/test_check_for_spaces_in_model_names.py index 45ca4bab307..543a4557eea 100644 --- a/tests/functional/manifest_validations/test_check_for_spaces_in_model_names.py +++ b/tests/functional/manifest_validations/test_check_for_spaces_in_model_names.py @@ -1,26 +1,21 @@ +from typing import Dict + import pytest -from dataclasses import dataclass, field +from dbt import deprecations from dbt.cli.main import dbtRunner -from dbt_common.events.base_types import BaseEvent, EventLevel, EventMsg -from dbt.events.types import SpacesInModelNameDeprecation, TotalModelNamesWithSpacesDeprecation +from dbt.events.types import ( + ResourceNamesWithSpacesDeprecation, + SpacesInResourceNameDeprecation, +) from dbt.tests.util import update_config_file -from typing import Dict, List - - -@dataclass -class EventCatcher: - event_to_catch: BaseEvent - caught_events: List[EventMsg] = field(default_factory=list) - - def catch(self, event: EventMsg): - if event.info.name == self.event_to_catch.__name__: - self.caught_events.append(event) +from dbt_common.events.base_types import EventLevel +from tests.utils import EventCatcher class TestSpacesInModelNamesHappyPath: def test_no_warnings_when_no_spaces_in_name(self, project) -> None: - event_catcher = EventCatcher(SpacesInModelNameDeprecation) + event_catcher = EventCatcher(SpacesInResourceNameDeprecation) runner = dbtRunner(callbacks=[event_catcher.catch]) runner.invoke(["parse"]) assert len(event_catcher.caught_events) == 0 @@ -34,15 +29,15 @@ def models(self) -> Dict[str, str]: } def tests_warning_when_spaces_in_name(self, project) -> None: - event_catcher = EventCatcher(SpacesInModelNameDeprecation) - total_catcher = EventCatcher(TotalModelNamesWithSpacesDeprecation) + event_catcher = EventCatcher(SpacesInResourceNameDeprecation) + total_catcher = EventCatcher(ResourceNamesWithSpacesDeprecation) runner = dbtRunner(callbacks=[event_catcher.catch, total_catcher.catch]) runner.invoke(["parse"]) assert 
len(total_catcher.caught_events) == 1 assert len(event_catcher.caught_events) == 1 event = event_catcher.caught_events[0] - assert "Model `my model` has spaces in its name. This is deprecated" in event.info.msg + assert "Found spaces in the name of `model.test.my model`" in event.info.msg assert event.info.level == EventLevel.WARN @@ -55,21 +50,21 @@ def models(self) -> Dict[str, str]: } def tests_debug_when_spaces_in_name(self, project) -> None: - spaces_check_catcher = EventCatcher(SpacesInModelNameDeprecation) - total_catcher = EventCatcher(TotalModelNamesWithSpacesDeprecation) + deprecations.reset_deprecations() + spaces_check_catcher = EventCatcher(SpacesInResourceNameDeprecation) + total_catcher = EventCatcher(ResourceNamesWithSpacesDeprecation) runner = dbtRunner(callbacks=[spaces_check_catcher.catch, total_catcher.catch]) runner.invoke(["parse"]) assert len(spaces_check_catcher.caught_events) == 1 assert len(total_catcher.caught_events) == 1 - assert ( - "Spaces in model names found in 2 model(s)" in total_catcher.caught_events[0].info.msg - ) + assert "Spaces found in 2 resource name(s)" in total_catcher.caught_events[0].info.msg assert ( "Run again with `--debug` to see them all." 
in total_catcher.caught_events[0].info.msg ) - spaces_check_catcher = EventCatcher(SpacesInModelNameDeprecation) - total_catcher = EventCatcher(TotalModelNamesWithSpacesDeprecation) + deprecations.reset_deprecations() + spaces_check_catcher = EventCatcher(SpacesInResourceNameDeprecation) + total_catcher = EventCatcher(ResourceNamesWithSpacesDeprecation) runner = dbtRunner(callbacks=[spaces_check_catcher.catch, total_catcher.catch]) runner.invoke(["parse", "--debug"]) assert len(spaces_check_catcher.caught_events) == 2 @@ -87,20 +82,20 @@ def models(self) -> Dict[str, str]: "my model.sql": "select 1 as id", } - def test_dont_allow_spaces_in_model_names(self, project): - spaces_check_catcher = EventCatcher(SpacesInModelNameDeprecation) + def test_require_resource_names_without_spaces(self, project): + spaces_check_catcher = EventCatcher(SpacesInResourceNameDeprecation) runner = dbtRunner(callbacks=[spaces_check_catcher.catch]) runner.invoke(["parse"]) assert len(spaces_check_catcher.caught_events) == 1 assert spaces_check_catcher.caught_events[0].info.level == EventLevel.WARN - config_patch = {"flags": {"allow_spaces_in_model_names": False}} + config_patch = {"flags": {"require_resource_names_without_spaces": True}} update_config_file(config_patch, project.project_root, "dbt_project.yml") - spaces_check_catcher = EventCatcher(SpacesInModelNameDeprecation) + spaces_check_catcher = EventCatcher(SpacesInResourceNameDeprecation) runner = dbtRunner(callbacks=[spaces_check_catcher.catch]) result = runner.invoke(["parse"]) assert not result.success - assert "Model names cannot contain spaces" in result.exception.__str__() + assert "Resource names cannot contain spaces" in result.exception.__str__() assert len(spaces_check_catcher.caught_events) == 1 assert spaces_check_catcher.caught_events[0].info.level == EventLevel.ERROR diff --git a/tests/functional/materializations/conftest.py b/tests/functional/materializations/conftest.py index b808c1a6a7b..f8b483d46cd 100644 --- 
a/tests/functional/materializations/conftest.py +++ b/tests/functional/materializations/conftest.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.fixtures.project import write_project_files +from dbt.tests.fixtures.project import write_project_files override_view_adapter_pass_dep__dbt_project_yml = """ name: view_adapter_override @@ -325,6 +325,21 @@ {%- endmaterialization -%} """ +custom_materialization_dep__dbt_project_yml = """ +name: custom_materialization_default +macro-paths: ['macros'] +""" + +custom_materialization_sql = """ +{% materialization custom_materialization, default %} + {%- set target_relation = this.incorporate(type='table') %} + {% call statement('main') -%} + select 1 as column1 + {%- endcall %} + {{ return({'relations': [target_relation]}) }} +{% endmaterialization %} +""" + @pytest.fixture(scope="class") def override_view_adapter_pass_dep(project_root): @@ -368,3 +383,12 @@ def override_view_return_no_relation(project_root): }, } write_project_files(project_root, "override-view-return-no-relation", files) + + +@pytest.fixture(scope="class") +def custom_materialization_dep(project_root): + files = { + "dbt_project.yml": custom_materialization_dep__dbt_project_yml, + "macros": {"custom_materialization.sql": custom_materialization_sql}, + } + write_project_files(project_root, "custom-materialization-dep", files) diff --git a/tests/functional/materializations/test_custom_materialization.py b/tests/functional/materializations/test_custom_materialization.py index 838eb68bb01..ae377cdb428 100644 --- a/tests/functional/materializations/test_custom_materialization.py +++ b/tests/functional/materializations/test_custom_materialization.py @@ -1,8 +1,8 @@ import pytest +from dbt import deprecations from dbt.tests.util import run_dbt - models__model_sql = """ {{ config(materialized='view') }} select 1 as id @@ -10,11 +10,24 @@ """ +models_custom_materialization__model_sql = """ +{{ config(materialized='custom_materialization') }} +select 1 as id + +""" + 
+ @pytest.fixture(scope="class") def models(): return {"model.sql": models__model_sql} +@pytest.fixture(scope="class") +def set_up_deprecations(): + deprecations.reset_deprecations() + assert deprecations.active_deprecations == set() + + class TestOverrideAdapterDependency: # make sure that if there's a dependency with an adapter-specific # materialization, we honor that materialization @@ -22,22 +35,165 @@ class TestOverrideAdapterDependency: def packages(self): return {"packages": [{"local": "override-view-adapter-dep"}]} - def test_adapter_dependency(self, project, override_view_adapter_dep): + def test_adapter_dependency(self, project, override_view_adapter_dep, set_up_deprecations): + run_dbt(["deps"]) + # this should pass because implicit overrides are now deprecated (= disabled by default) + run_dbt(["run"]) + + +class TestOverrideAdapterDependencyDeprecated: + # make sure that if there's a dependency with an adapter-specific + # materialization, we honor that materialization + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "override-view-adapter-dep"}]} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "flags": { + "require_explicit_package_overrides_for_builtin_materializations": True, + }, + } + + def test_adapter_dependency_deprecate_overrides( + self, project, override_view_adapter_dep, set_up_deprecations + ): + run_dbt(["deps"]) + # this should pass because the override is buggy and unused + run_dbt(["run"]) + + # no deprecation warning -- flag used correctly + assert deprecations.active_deprecations == set() + + +class TestOverrideAdapterDependencyLegacy: + # make sure that if there's a dependency with an adapter-specific + # materialization, we honor that materialization + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "override-view-adapter-dep"}]} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "flags": 
{ + "require_explicit_package_overrides_for_builtin_materializations": False, + }, + } + + def test_adapter_dependency(self, project, override_view_adapter_dep, set_up_deprecations): run_dbt(["deps"]) # this should error because the override is buggy run_dbt(["run"], expect_pass=False) + # overriding a built-in materialization scoped to adapter from package is deprecated + assert deprecations.active_deprecations == {"package-materialization-override"} + class TestOverrideDefaultDependency: @pytest.fixture(scope="class") def packages(self): return {"packages": [{"local": "override-view-default-dep"}]} - def test_default_dependency(self, project, override_view_default_dep): + def test_default_dependency(self, project, override_view_default_dep, set_up_deprecations): + run_dbt(["deps"]) + # this should pass because implicit overrides are now deprecated (= disabled by default) + run_dbt(["run"]) + + +class TestOverrideDefaultDependencyDeprecated: + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "override-view-default-dep"}]} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "flags": { + "require_explicit_package_overrides_for_builtin_materializations": True, + }, + } + + def test_default_dependency_deprecated( + self, project, override_view_default_dep, set_up_deprecations + ): + run_dbt(["deps"]) + # this should pass because the override is buggy and unused + run_dbt(["run"]) + + # overriding a built-in materialization from package is deprecated + assert deprecations.active_deprecations == set() + + +class TestOverrideDefaultDependencyLegacy: + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "override-view-default-dep"}]} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "flags": { + "require_explicit_package_overrides_for_builtin_materializations": False, + }, + } + + def test_default_dependency(self, project, 
override_view_default_dep, set_up_deprecations): + run_dbt(["deps"]) + # this should error because the override is buggy + run_dbt(["run"], expect_pass=False) + + # overriding a built-in materialization from package is deprecated + assert deprecations.active_deprecations == {"package-materialization-override"} + + +root_view_override_macro = """ +{% materialization view, default %} + {{ return(view_default_override.materialization_view_default()) }} +{% endmaterialization %} +""" + + +class TestOverrideDefaultDependencyRootOverride: + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "override-view-default-dep"}]} + + @pytest.fixture(scope="class") + def macros(self): + return {"my_view.sql": root_view_override_macro} + + def test_default_dependency_with_root_override( + self, project, override_view_default_dep, set_up_deprecations + ) { run_dbt(["deps"]) # this should error because the override is buggy run_dbt(["run"], expect_pass=False) + # using a package-overridden built-in materialization in a root materialization is _not_ deprecated + assert deprecations.active_deprecations == set() + + +class TestCustomMaterializationDependency: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": models_custom_materialization__model_sql} + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "custom-materialization-dep"}]} + + def test_custom_materialization_dependency( + self, project, custom_materialization_dep, set_up_deprecations + ): + run_dbt(["deps"]) + # custom materialization is valid + run_dbt(["run"]) + + + # using a custom materialization from an installed package is _not_ deprecated + assert deprecations.active_deprecations == set() + class TestOverrideAdapterDependencyPassing: + @pytest.fixture(scope="class") diff --git a/tests/functional/materializations/test_ephemeral_compilation.py b/tests/functional/materializations/test_ephemeral_compilation.py index 
f8419e40fd5..3b8ea9d4a86 100644 --- a/tests/functional/materializations/test_ephemeral_compilation.py +++ b/tests/functional/materializations/test_ephemeral_compilation.py @@ -1,31 +1,31 @@ +import pytest + from dbt.contracts.graph.nodes import ModelNode from dbt.contracts.results import RunExecutionResult, RunResult -import pytest from dbt.tests.util import run_dbt - -# Note: This tests compilation only, so is a dbt Core test and not an adapter test. -# There is some complicated logic in core/dbt/compilation.py having to do with -# ephemeral nodes and handling multiple threads at the same time. This test -# fails fairly regularly if that is broken, but does occasionally work (depending -# on the order in which things are compiled). It requires multi-threading to fail. - from tests.functional.materializations.fixtures import ( - fct_eph_first_sql, - int_eph_first_sql, - schema_yml, - bar_sql, bar1_sql, bar2_sql, bar3_sql, bar4_sql, bar5_sql, - baz_sql, + bar_sql, baz1_sql, - foo_sql, + baz_sql, + fct_eph_first_sql, foo1_sql, foo2_sql, + foo_sql, + int_eph_first_sql, + schema_yml, ) +# Note: This tests compilation only, so is a dbt Core test and not an adapter test. +# There is some complicated logic in core/dbt/compilation.py having to do with +# ephemeral nodes and handling multiple threads at the same time. This test +# fails fairly regularly if that is broken, but does occasionally work (depending +# on the order in which things are compiled). It requires multi-threading to fail. 
+ SUPPRESSED_CTE_EXPECTED_OUTPUT = """-- fct_eph_first.sql diff --git a/tests/functional/materializations/test_incremental.py b/tests/functional/materializations/test_incremental.py index b331c9d14b5..48745d0f987 100644 --- a/tests/functional/materializations/test_incremental.py +++ b/tests/functional/materializations/test_incremental.py @@ -1,8 +1,8 @@ import pytest -from dbt.tests.util import run_dbt, get_manifest -from dbt.exceptions import DbtRuntimeError -from dbt.context.providers import generate_runtime_model_context +from dbt.context.providers import generate_runtime_model_context +from dbt.exceptions import DbtRuntimeError +from dbt.tests.util import get_manifest, run_dbt my_model_sql = """ select 1 as fun diff --git a/tests/functional/materializations/test_runtime_materialization.py b/tests/functional/materializations/test_runtime_materialization.py index 336aac561f2..6f84b478e2b 100644 --- a/tests/functional/materializations/test_runtime_materialization.py +++ b/tests/functional/materializations/test_runtime_materialization.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal, check_table_does_not_exist +from dbt.tests.util import check_relations_equal, check_table_does_not_exist, run_dbt models__view_sql = """ {{ diff --git a/tests/functional/materializations/test_supported_languages.py b/tests/functional/materializations/test_supported_languages.py index a2ef8077de5..5e7b8b332e4 100644 --- a/tests/functional/materializations/test_supported_languages.py +++ b/tests/functional/materializations/test_supported_languages.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt +from dbt.tests.util import run_dbt custom_mat_tmpl = """ {% materialization custom_mat{} %} diff --git a/tests/functional/metrics/test_metric_configs.py b/tests/functional/metrics/test_metric_configs.py index 5a1246dae9a..2be68d9e17f 100644 --- a/tests/functional/metrics/test_metric_configs.py +++ 
b/tests/functional/metrics/test_metric_configs.py @@ -1,19 +1,18 @@ import pytest + from dbt.artifacts.resources import MetricConfig -from dbt_common.dataclass_schema import ValidationError from dbt.exceptions import CompilationError, ParsingError -from dbt.tests.util import run_dbt, update_config_file, get_manifest - - +from dbt.tests.util import get_manifest, run_dbt, update_config_file +from dbt_common.dataclass_schema import ValidationError from tests.functional.metrics.fixtures import ( - models_people_sql, - models_people_metrics_yml, - models_people_metrics_meta_top_yml, - metricflow_time_spine_sql, disabled_metric_level_schema_yml, enabled_metric_level_schema_yml, - models_people_metrics_sql, invalid_config_metric_yml, + metricflow_time_spine_sql, + models_people_metrics_meta_top_yml, + models_people_metrics_sql, + models_people_metrics_yml, + models_people_sql, semantic_model_people_yml, ) diff --git a/tests/functional/metrics/test_metric_deferral.py b/tests/functional/metrics/test_metric_deferral.py index 8803bf249da..603016acb0c 100644 --- a/tests/functional/metrics/test_metric_deferral.py +++ b/tests/functional/metrics/test_metric_deferral.py @@ -1,7 +1,9 @@ import os -import pytest -from dbt.tests.util import run_dbt, copy_file, write_file from pathlib import Path + +import pytest + +from dbt.tests.util import copy_file, run_dbt, write_file from tests.functional.metrics.fixtures import ( metrics_1_yml, metrics_2_yml, diff --git a/tests/functional/metrics/test_metric_helper_functions.py b/tests/functional/metrics/test_metric_helper_functions.py index a5775a0ed74..8abdce4af29 100644 --- a/tests/functional/metrics/test_metric_helper_functions.py +++ b/tests/functional/metrics/test_metric_helper_functions.py @@ -1,14 +1,13 @@ import pytest -from dbt.tests.util import run_dbt from dbt.contracts.graph.manifest import Manifest from dbt.contracts.graph.metrics import ResolvedMetricReference - +from dbt.tests.util import run_dbt from 
tests.functional.metrics.fixtures import ( - models_people_sql, basic_metrics_yml, - semantic_model_people_yml, metricflow_time_spine_sql, + models_people_sql, + semantic_model_people_yml, ) diff --git a/tests/functional/metrics/test_metrics.py b/tests/functional/metrics/test_metrics.py index f87cc56b9b3..b640c90c199 100644 --- a/tests/functional/metrics/test_metrics.py +++ b/tests/functional/metrics/test_metrics.py @@ -3,34 +3,32 @@ from dbt.cli.main import dbtRunner from dbt.contracts.graph.manifest import Manifest from dbt.exceptions import ParsingError -from dbt.tests.util import run_dbt, get_manifest - - +from dbt.tests.util import get_manifest, run_dbt from tests.functional.metrics.fixtures import ( - conversion_semantic_model_purchasing_yml, + basic_metrics_yml, conversion_metric_yml, - mock_purchase_data_csv, - models_people_sql, - models_people_metrics_yml, - invalid_models_people_metrics_yml, - invalid_metrics_missing_model_yml, - invalid_metrics_missing_expression_yml, - names_with_spaces_metrics_yml, - names_with_special_chars_metrics_yml, - names_with_leading_numeric_metrics_yml, - long_name_metrics_yml, + conversion_semantic_model_purchasing_yml, + derived_metric_yml, downstream_model_sql, + duplicate_measure_metric_yml, + filtered_metrics_yml, invalid_derived_metric_contains_model_yml, - derived_metric_yml, invalid_metric_without_timestamp_with_time_grains_yml, invalid_metric_without_timestamp_with_window_yml, + invalid_metrics_missing_expression_yml, + invalid_metrics_missing_model_yml, + invalid_models_people_metrics_yml, + long_name_metrics_yml, metricflow_time_spine_sql, + mock_purchase_data_csv, + models_people_metrics_yml, + models_people_sql, + names_with_leading_numeric_metrics_yml, + names_with_spaces_metrics_yml, + names_with_special_chars_metrics_yml, + purchasing_model_sql, semantic_model_people_yml, semantic_model_purchasing_yml, - purchasing_model_sql, - filtered_metrics_yml, - basic_metrics_yml, - duplicate_measure_metric_yml, ) diff 
--git a/tests/functional/partial_parsing/test_file_diff.py b/tests/functional/partial_parsing/test_file_diff.py index f2493de19df..3661a823a65 100644 --- a/tests/functional/partial_parsing/test_file_diff.py +++ b/tests/functional/partial_parsing/test_file_diff.py @@ -1,10 +1,10 @@ import os + import pytest from dbt.tests.util import run_dbt, write_artifact, write_file from tests.functional.partial_parsing.fixtures import model_one_sql, model_two_sql - first_file_diff = { "deleted": [], "changed": [], diff --git a/tests/functional/partial_parsing/test_partial_parsing.py b/tests/functional/partial_parsing/test_partial_parsing.py index 6b5ba8895cd..5a1f496c511 100644 --- a/tests/functional/partial_parsing/test_partial_parsing.py +++ b/tests/functional/partial_parsing/test_partial_parsing.py @@ -1,85 +1,84 @@ +import os +import re from argparse import Namespace -import pytest from unittest import mock +import pytest +import yaml + import dbt.flags as flags +from dbt.contracts.files import ParseFileType +from dbt.contracts.results import TestStatus +from dbt.exceptions import CompilationError +from dbt.plugins.manifest import ModelNodeArgs, PluginNodes +from dbt.tests.fixtures.project import write_project_files from dbt.tests.util import ( - run_dbt, get_manifest, - write_file, + rename_dir, rm_file, + run_dbt, run_dbt_and_capture, - rename_dir, + write_file, ) -import yaml -from tests.functional.utils import up_one -from dbt.tests.fixtures.project import write_project_files from tests.functional.partial_parsing.fixtures import ( + custom_schema_tests1_sql, + custom_schema_tests2_sql, + customers1_md, + customers2_md, + customers_sql, + empty_schema_with_version_yml, + empty_schema_yml, + generic_schema_yml, + generic_test_edited_sql, + generic_test_schema_yml, + generic_test_sql, + gsm_override2_sql, + gsm_override_sql, + local_dependency__dbt_project_yml, + local_dependency__macros__dep_macro_sql, + local_dependency__models__model_to_import_sql, + 
local_dependency__models__schema_yml, + local_dependency__seeds__seed_csv, + macros_schema_yml, + macros_yml, + model_a_sql, + model_b_sql, + model_four1_sql, + model_four2_sql, model_one_sql, + model_three_disabled2_sql, + model_three_disabled_sql, + model_three_modified_sql, + model_three_sql, model_two_sql, models_schema1_yml, models_schema2_yml, models_schema2b_yml, - model_three_sql, - model_three_modified_sql, - model_four1_sql, - model_four2_sql, + models_schema3_yml, models_schema4_yml, models_schema4b_yml, - models_schema3_yml, - my_macro_sql, + my_analysis_sql, my_macro2_sql, - macros_yml, - empty_schema_yml, - empty_schema_with_version_yml, - model_three_disabled_sql, - model_three_disabled2_sql, + my_macro_sql, + my_test_sql, + orders_sql, raw_customers_csv, - customers_sql, - sources_tests1_sql, + ref_override2_sql, + ref_override_sql, + schema_models_c_yml, schema_sources1_yml, schema_sources2_yml, schema_sources3_yml, schema_sources4_yml, schema_sources5_yml, - customers1_md, - customers2_md, - test_macro_sql, - my_test_sql, - test_macro2_sql, - my_analysis_sql, - sources_tests2_sql, - local_dependency__dbt_project_yml, - local_dependency__models__schema_yml, - local_dependency__models__model_to_import_sql, - local_dependency__macros__dep_macro_sql, - local_dependency__seeds__seed_csv, - schema_models_c_yml, - model_a_sql, - model_b_sql, - macros_schema_yml, - custom_schema_tests1_sql, - custom_schema_tests2_sql, - ref_override_sql, - ref_override2_sql, - gsm_override_sql, - gsm_override2_sql, - orders_sql, - snapshot_sql, snapshot2_sql, - generic_schema_yml, - generic_test_sql, - generic_test_schema_yml, - generic_test_edited_sql, + snapshot_sql, + sources_tests1_sql, + sources_tests2_sql, + test_macro2_sql, + test_macro_sql, ) - -from dbt.exceptions import CompilationError -from dbt.contracts.files import ParseFileType -from dbt.contracts.results import TestStatus -from dbt.plugins.manifest import PluginNodes, ModelNodeArgs - -import re -import os 
+from tests.functional.utils import up_one os.environ["DBT_PP_TEST"] = "true" diff --git a/tests/functional/partial_parsing/test_pp_disabled_config.py b/tests/functional/partial_parsing/test_pp_disabled_config.py index 03d2e8a728b..472d84ecff6 100644 --- a/tests/functional/partial_parsing/test_pp_disabled_config.py +++ b/tests/functional/partial_parsing/test_pp_disabled_config.py @@ -1,5 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, write_file, get_manifest + +from dbt.tests.util import get_manifest, run_dbt, write_file model_one_sql = """ select 1 as fun diff --git a/tests/functional/partial_parsing/test_pp_docs.py b/tests/functional/partial_parsing/test_pp_docs.py index 871e015b493..c818cca364b 100644 --- a/tests/functional/partial_parsing/test_pp_docs.py +++ b/tests/functional/partial_parsing/test_pp_docs.py @@ -1,5 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, write_file, get_manifest, rm_file + +from dbt.tests.util import get_manifest, rm_file, run_dbt, write_file model_one_sql = """ select 1 as fun diff --git a/tests/functional/partial_parsing/test_pp_groups.py b/tests/functional/partial_parsing/test_pp_groups.py index 5871d3dd98f..57a0917105e 100644 --- a/tests/functional/partial_parsing/test_pp_groups.py +++ b/tests/functional/partial_parsing/test_pp_groups.py @@ -1,17 +1,16 @@ import pytest -from dbt.tests.util import run_dbt, get_manifest, write_file from dbt.exceptions import ParsingError - +from dbt.tests.util import get_manifest, run_dbt, write_file from tests.functional.partial_parsing.fixtures import ( - orders_sql, - orders_downstream_sql, groups_schema_yml_one_group, + groups_schema_yml_one_group_model_in_group2, groups_schema_yml_two_groups, groups_schema_yml_two_groups_edited, - groups_schema_yml_one_group_model_in_group2, - groups_schema_yml_two_groups_private_orders_valid_access, groups_schema_yml_two_groups_private_orders_invalid_access, + groups_schema_yml_two_groups_private_orders_valid_access, + 
orders_downstream_sql, + orders_sql, ) diff --git a/tests/functional/partial_parsing/test_pp_metrics.py b/tests/functional/partial_parsing/test_pp_metrics.py index 19da625604b..9aa5530c419 100644 --- a/tests/functional/partial_parsing/test_pp_metrics.py +++ b/tests/functional/partial_parsing/test_pp_metrics.py @@ -2,20 +2,19 @@ from dbt.cli.main import dbtRunner from dbt.contracts.graph.manifest import Manifest -from dbt.tests.util import run_dbt, rm_file, write_file, get_manifest +from dbt.exceptions import CompilationError +from dbt.tests.util import get_manifest, rm_file, run_dbt, write_file from tests.functional.partial_parsing.fixtures import ( - people_sql, + metric_model_a_sql, metricflow_time_spine_sql, - people_semantic_models_yml, - people_metrics_yml, people_metrics2_yml, - metric_model_a_sql, people_metrics3_yml, + people_metrics_yml, + people_semantic_models_yml, people_sl_yml, + people_sql, ) -from dbt.exceptions import CompilationError - class TestMetrics: @pytest.fixture(scope="class") diff --git a/tests/functional/partial_parsing/test_pp_vars.py b/tests/functional/partial_parsing/test_pp_vars.py index e55592f8dd2..a2e915dcb34 100644 --- a/tests/functional/partial_parsing/test_pp_vars.py +++ b/tests/functional/partial_parsing/test_pp_vars.py @@ -2,11 +2,11 @@ from pathlib import Path import pytest -from dbt.constants import SECRET_ENV_PREFIX -from dbt.exceptions import ParsingError + from dbt.adapters.exceptions import FailedToConnectError +from dbt.exceptions import ParsingError from dbt.tests.util import get_manifest, run_dbt, run_dbt_and_capture, write_file - +from dbt_common.constants import SECRET_ENV_PREFIX from tests.functional.partial_parsing.fixtures import ( env_var_macro_sql, env_var_macros_yml, @@ -366,7 +366,7 @@ def dbt_profile_target(self): # user is secret and password is not. postgres on macos doesn't care if the password # changes so we have to change the user. 
related: https://github.com/dbt-labs/dbt-core/pull/4250 - os.environ[SECRET_ENV_PREFIX + "USER"] = "root" + os.environ[SECRET_ENV_PREFIX + "_USER"] = "root" os.environ["ENV_VAR_PASS"] = "password" return { "type": "postgres", @@ -381,7 +381,7 @@ def dbt_profile_target(self): def test_profile_secret_env_vars(self, project): # Initial run - os.environ[SECRET_ENV_PREFIX + "USER"] = "root" + os.environ[SECRET_ENV_PREFIX + "_USER"] = "root" os.environ["ENV_VAR_PASS"] = "password" results = run_dbt(["run"]) @@ -389,7 +389,7 @@ def test_profile_secret_env_vars(self, project): env_vars_checksum = manifest.state_check.profile_env_vars_hash.checksum # Change a secret var, it shouldn't register because we shouldn't save secrets. - os.environ[SECRET_ENV_PREFIX + "USER"] = "fake_user" + os.environ[SECRET_ENV_PREFIX + "_USER"] = "fake_user" # we just want to see if the manifest has included # the secret in the hash of environment variables. (results, log_output) = run_dbt_and_capture(["run"], expect_pass=True) diff --git a/tests/functional/partial_parsing/test_versioned_models.py b/tests/functional/partial_parsing/test_versioned_models.py index 06e56d9c0cd..0e6ef22cd2f 100644 --- a/tests/functional/partial_parsing/test_versioned_models.py +++ b/tests/functional/partial_parsing/test_versioned_models.py @@ -1,13 +1,9 @@ -import pytest import pathlib -from dbt.tests.util import ( - run_dbt, - get_manifest, - write_file, - rm_file, - read_file, -) + +import pytest + from dbt.exceptions import DuplicateVersionedUnversionedError +from dbt.tests.util import get_manifest, read_file, rm_file, run_dbt, write_file model_one_sql = """ select 1 as fun diff --git a/tests/functional/permission/fixtures.py b/tests/functional/permission/fixtures.py index e6014b09754..7ac2f6f3665 100644 --- a/tests/functional/permission/fixtures.py +++ b/tests/functional/permission/fixtures.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.fixtures.project import write_project_files +from 
dbt.tests.fixtures.project import write_project_files models__view_model_sql = """ diff --git a/tests/functional/postgres/test_postgres_indexes.py b/tests/functional/postgres/test_postgres_indexes.py index 143a0888755..ceb9d0514bd 100644 --- a/tests/functional/postgres/test_postgres_indexes.py +++ b/tests/functional/postgres/test_postgres_indexes.py @@ -1,21 +1,19 @@ -import pytest import re -from dbt.tests.util import ( - run_dbt, - run_dbt_and_capture, -) + +import pytest + +from dbt.tests.util import run_dbt, run_dbt_and_capture from tests.functional.postgres.fixtures import ( models__incremental_sql, models__table_sql, - models_invalid__missing_columns_sql, models_invalid__invalid_columns_type_sql, models_invalid__invalid_type_sql, models_invalid__invalid_unique_config_sql, + models_invalid__missing_columns_sql, seeds__seed_csv, snapshots__colors_sql, ) - INDEX_DEFINITION_PATTERN = re.compile(r"using\s+(\w+)\s+\((.+)\)\Z") diff --git a/tests/functional/postgres/test_postgres_unlogged_table.py b/tests/functional/postgres/test_postgres_unlogged_table.py index bfb739ef41e..50c23635b64 100644 --- a/tests/functional/postgres/test_postgres_unlogged_table.py +++ b/tests/functional/postgres/test_postgres_unlogged_table.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt +from dbt.tests.util import run_dbt schema_yml = """ version: 2 diff --git a/tests/functional/primary_keys/fixtures.py b/tests/functional/primary_keys/fixtures.py new file mode 100644 index 00000000000..888e9d65190 --- /dev/null +++ b/tests/functional/primary_keys/fixtures.py @@ -0,0 +1,88 @@ +simple_model_sql = """ +select 1 as id, 'blue' as color +""" + +simple_model_unique_test = """ +models: + - name: simple_model + columns: + - name: id + tests: + - unique +""" + +simple_model_disabled_unique_test = """ +models: + - name: simple_model + columns: + - name: id + tests: + - unique: + enabled: false + +""" + +simple_model_unique_not_null_tests = """ +models: + - name: simple_model + 
columns: + - name: id + tests: + - unique + - not_null +""" + +simple_model_unique_combo_of_columns = """ +models: + - name: simple_model + tests: + - dbt_utils.unique_combination_of_columns: + combination_of_columns: [id, color] +""" + +simple_model_constraints = """ +models: + - name: simple_model + config: + contract: + enforced: true + columns: + - name: id + data_type: int + constraints: + - type: not_null + - type: primary_key + - name: color + data_type: text +""" + +simple_model_two_versions_both_configured = """ +models: + - name: simple_model + latest_version: 1 + columns: + - name: id + tests: + - unique + - not_null + versions: + - v: 1 + - v: 2 +""" + +simple_model_two_versions_exclude_col = """ +models: + - name: simple_model + latest_version: 1 + columns: + - name: id + tests: + - unique + - not_null + versions: + - v: 1 + - v: 2 + columns: + - include: all + exclude: [id] +""" diff --git a/tests/functional/primary_keys/test_primary_keys.py b/tests/functional/primary_keys/test_primary_keys.py new file mode 100644 index 00000000000..866ff2967ce --- /dev/null +++ b/tests/functional/primary_keys/test_primary_keys.py @@ -0,0 +1,157 @@ +import pytest + +from dbt.tests.util import get_manifest, run_dbt +from tests.functional.primary_keys.fixtures import ( + simple_model_constraints, + simple_model_disabled_unique_test, + simple_model_sql, + simple_model_two_versions_both_configured, + simple_model_two_versions_exclude_col, + simple_model_unique_combo_of_columns, + simple_model_unique_not_null_tests, + simple_model_unique_test, +) + + +class TestSimpleModelNoYml: + @pytest.fixture(scope="class") + def models(self): + return { + "simple_model.sql": simple_model_sql, + } + + def test_simple_model_no_yml(self, project): + run_dbt(["deps"]) + run_dbt(["run"]) + manifest = get_manifest(project.project_root) + node = manifest.nodes["model.test.simple_model"] + assert node.primary_key == [] + + +class TestSimpleModelConstraints: + @pytest.fixture(scope="class") + 
def models(self): + return { + "simple_model.sql": simple_model_sql, + "schema.yml": simple_model_constraints, + } + + def test_simple_model_constraints(self, project): + run_dbt(["deps"]) + run_dbt(["run"]) + manifest = get_manifest(project.project_root) + node = manifest.nodes["model.test.simple_model"] + assert node.primary_key == ["id"] + + +class TestSimpleModelUniqueNotNullTests: + @pytest.fixture(scope="class") + def models(self): + return { + "simple_model.sql": simple_model_sql, + "schema.yml": simple_model_unique_not_null_tests, + } + + def test_simple_model_unique_not_null_tests(self, project): + run_dbt(["deps"]) + run_dbt(["run"]) + manifest = get_manifest(project.project_root) + node = manifest.nodes["model.test.simple_model"] + assert node.primary_key == ["id"] + + +class TestSimpleModelUniqueTests: + @pytest.fixture(scope="class") + def models(self): + return { + "simple_model.sql": simple_model_sql, + "schema.yml": simple_model_unique_test, + } + + def test_simple_model_unique_test(self, project): + run_dbt(["deps"]) + run_dbt(["run"]) + manifest = get_manifest(project.project_root) + node = manifest.nodes["model.test.simple_model"] + assert node.primary_key == ["id"] + + +class TestSimpleModelDisabledUniqueTests: + @pytest.fixture(scope="class") + def models(self): + return { + "simple_model.sql": simple_model_sql, + "schema.yml": simple_model_disabled_unique_test, + } + + def test_simple_model_disabled_unique_test(self, project): + run_dbt(["deps"]) + run_dbt(["run"]) + manifest = get_manifest(project.project_root) + node = manifest.nodes["model.test.simple_model"] + assert node.primary_key == ["id"] + + +class TestVersionedSimpleModel: + @pytest.fixture(scope="class") + def models(self): + return { + "simple_model_v1.sql": simple_model_sql, + "simple_model_v2.sql": simple_model_sql, + "schema.yml": simple_model_two_versions_both_configured, + } + + def test_versioned_simple_model(self, project): + run_dbt(["deps"]) + run_dbt(["run"]) + manifest 
= get_manifest(project.project_root) + node_v1 = manifest.nodes["model.test.simple_model.v1"] + node_v2 = manifest.nodes["model.test.simple_model.v2"] + assert node_v1.primary_key == ["id"] + assert node_v2.primary_key == ["id"] + + +class TestVersionedSimpleModelExcludeTests: + @pytest.fixture(scope="class") + def models(self): + return { + "simple_model_v1.sql": simple_model_sql, + "simple_model_v2.sql": simple_model_sql, + "schema.yml": simple_model_two_versions_exclude_col, + } + + def test_versioned_simple_model_exclude_col(self, project): + run_dbt(["deps"]) + run_dbt(["run"]) + manifest = get_manifest(project.project_root) + node_v1 = manifest.nodes["model.test.simple_model.v1"] + node_v2 = manifest.nodes["model.test.simple_model.v2"] + assert node_v1.primary_key == ["id"] + assert node_v2.primary_key == [] + + +class TestSimpleModelCombinationOfColumns: + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "git": "https://github.com/dbt-labs/dbt-utils.git", + "revision": "1.1.0", + }, + ] + } + + @pytest.fixture(scope="class") + def models(self): + return { + "simple_model.sql": simple_model_sql, + "schema.yml": simple_model_unique_combo_of_columns, + } + + def test_versioned_simple_combo_of_columns(self, project): + run_dbt(["deps"]) + run_dbt(["run"]) + manifest = get_manifest(project.project_root) + node = manifest.nodes["model.test.simple_model"] + assert node.primary_key == ["color", "id"] diff --git a/tests/functional/profiles/test_profile_dir.py b/tests/functional/profiles/test_profile_dir.py index b9237af5dce..8545e2c4773 100644 --- a/tests/functional/profiles/test_profile_dir.py +++ b/tests/functional/profiles/test_profile_dir.py @@ -1,19 +1,13 @@ import os -import pytest -import yaml - +from argparse import Namespace from contextlib import contextmanager from pathlib import Path -from argparse import Namespace -import dbt.flags as flags +import pytest +import yaml -from dbt.tests.util import ( - run_dbt, - 
run_dbt_and_capture, - write_file, - rm_file, -) +import dbt.flags as flags +from dbt.tests.util import rm_file, run_dbt, run_dbt_and_capture, write_file @pytest.fixture(scope="class") diff --git a/tests/functional/profiles/test_profiles_yml.py b/tests/functional/profiles/test_profiles_yml.py index 50771c24132..2af18b5f982 100644 --- a/tests/functional/profiles/test_profiles_yml.py +++ b/tests/functional/profiles/test_profiles_yml.py @@ -1,4 +1,5 @@ import pathlib + from test_profile_dir import environ from dbt.cli.main import dbtRunner diff --git a/tests/functional/record/record.py b/tests/functional/record/record.py new file mode 100644 index 00000000000..6b1779f8fc0 --- /dev/null +++ b/tests/functional/record/record.py @@ -0,0 +1,17 @@ +import os + +from dbt.tests.util import run_dbt + + +class TestRecord: + def test_record_when_env_var_set(self, project): + temp = os.environ.get("DBT_RECORD", None) + try: + os.environ["DBT_RECORD"] = "True" + run_dbt(["run"]) + assert os.path.isfile(os.path.join(os.getcwd(), "recording.json")) + finally: + if temp is None: + del os.environ["DBT_RECORD"] + else: + os.environ["DBT_RECORD"] = temp diff --git a/tests/functional/ref_override/test_ref_override.py b/tests/functional/ref_override/test_ref_override.py index d77dcc78bc4..97ffa790173 100644 --- a/tests/functional/ref_override/test_ref_override.py +++ b/tests/functional/ref_override/test_ref_override.py @@ -1,7 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal - +from dbt.tests.util import check_relations_equal, run_dbt models__ref_override_sql = """ select diff --git a/tests/functional/retry/test_retry.py b/tests/functional/retry/test_retry.py index 7bf8d8a0ef5..012db25e42f 100644 --- a/tests/functional/retry/test_retry.py +++ b/tests/functional/retry/test_retry.py @@ -5,13 +5,13 @@ from dbt.contracts.results import RunStatus, TestStatus from dbt.exceptions import DbtRuntimeError, TargetNotFoundError -from dbt.tests.util import run_dbt, 
write_file, rm_file +from dbt.tests.util import rm_file, run_dbt, write_file from tests.functional.retry.fixtures import ( + macros__alter_timezone_sql, models__sample_model, + models__second_model, models__union_model, schema_yml, - models__second_model, - macros__alter_timezone_sql, simple_model, simple_schema, ) diff --git a/tests/functional/run_operations/test_run_operations.py b/tests/functional/run_operations/test_run_operations.py index 465deb6c643..064c98b3a51 100644 --- a/tests/functional/run_operations/test_run_operations.py +++ b/tests/functional/run_operations/test_run_operations.py @@ -3,17 +3,21 @@ import pytest import yaml -from dbt_common.exceptions import DbtInternalError from dbt.tests.util import ( check_table_does_exist, - run_dbt, - write_file, mkdir, - run_dbt_and_capture, rm_dir, rm_file, + run_dbt, + run_dbt_and_capture, + write_file, +) +from dbt_common.exceptions import DbtInternalError +from tests.functional.run_operations.fixtures import ( + happy_macros_sql, + model_sql, + sad_macros_sql, ) -from tests.functional.run_operations.fixtures import happy_macros_sql, sad_macros_sql, model_sql class TestOperations: diff --git a/tests/functional/saved_queries/test_configs.py b/tests/functional/saved_queries/test_configs.py index ef63888441a..1ce068bc261 100644 --- a/tests/functional/saved_queries/test_configs.py +++ b/tests/functional/saved_queries/test_configs.py @@ -1,18 +1,20 @@ import pytest +from dbt_semantic_interfaces.type_enums.export_destination_type import ( + ExportDestinationType, +) from dbt.contracts.graph.manifest import Manifest from dbt.tests.util import update_config_file -from dbt_semantic_interfaces.type_enums.export_destination_type import ExportDestinationType from tests.functional.assertions.test_runner import dbtTestRunner from tests.functional.configs.fixtures import BaseConfigProject from tests.functional.saved_queries.fixtures import ( + saved_queries_with_defaults_yml, saved_queries_yml, saved_query_description, 
saved_query_with_cache_configs_defined_yml, - saved_query_with_extra_config_attributes_yml, saved_query_with_export_configs_defined_at_saved_query_level_yml, + saved_query_with_extra_config_attributes_yml, saved_query_without_export_configs_defined_yml, - saved_queries_with_defaults_yml, ) from tests.functional.semantic_models.fixtures import ( fct_revenue_sql, diff --git a/tests/functional/saved_queries/test_saved_query_build.py b/tests/functional/saved_queries/test_saved_query_build.py index 2f721b15337..dc89521f380 100644 --- a/tests/functional/saved_queries/test_saved_query_build.py +++ b/tests/functional/saved_queries/test_saved_query_build.py @@ -1,7 +1,10 @@ import pytest from dbt.tests.util import run_dbt -from tests.functional.saved_queries.fixtures import saved_queries_yml, saved_query_description +from tests.functional.saved_queries.fixtures import ( + saved_queries_yml, + saved_query_description, +) from tests.functional.semantic_models.fixtures import ( fct_revenue_sql, metricflow_time_spine_sql, diff --git a/tests/functional/saved_queries/test_saved_query_parsing.py b/tests/functional/saved_queries/test_saved_query_parsing.py index 9c02b7c5418..ce3763acfbc 100644 --- a/tests/functional/saved_queries/test_saved_query_parsing.py +++ b/tests/functional/saved_queries/test_saved_query_parsing.py @@ -1,16 +1,18 @@ from typing import List import pytest +from dbt_semantic_interfaces.type_enums.export_destination_type import ( + ExportDestinationType, +) from dbt.contracts.graph.manifest import Manifest -from dbt_common.events.base_types import BaseEvent from dbt.tests.util import write_file -from dbt_semantic_interfaces.type_enums.export_destination_type import ExportDestinationType +from dbt_common.events.base_types import BaseEvent from tests.functional.assertions.test_runner import dbtTestRunner from tests.functional.saved_queries.fixtures import ( + saved_queries_with_diff_filters_yml, saved_queries_yml, saved_query_description, - 
saved_queries_with_diff_filters_yml, ) from tests.functional.semantic_models.fixtures import ( fct_revenue_sql, diff --git a/tests/functional/schema/test_custom_schema.py b/tests/functional/schema/test_custom_schema.py index 7262a79cce9..5a9969e4284 100644 --- a/tests/functional/schema/test_custom_schema.py +++ b/tests/functional/schema/test_custom_schema.py @@ -1,5 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal + +from dbt.tests.util import check_relations_equal, run_dbt from tests.functional.schema.fixtures.macros import ( _CUSTOM_MACRO, _CUSTOM_MACRO_MULTI_SCHEMA, diff --git a/tests/functional/schema_tests/test_schema_v2_tests.py b/tests/functional/schema_tests/test_schema_v2_tests.py index 34286d82532..ea33e62bce3 100644 --- a/tests/functional/schema_tests/test_schema_v2_tests.py +++ b/tests/functional/schema_tests/test_schema_v2_tests.py @@ -1,102 +1,103 @@ -import pytest import os import re -from dbt.tests.util import run_dbt, write_file +import pytest + +from dbt.contracts.results import TestStatus +from dbt.exceptions import CompilationError, DuplicateResourceNameError, ParsingError from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import run_dbt, write_file from tests.fixtures.dbt_integration_project import dbt_integration_project # noqa: F401 from tests.functional.schema_tests.fixtures import ( - wrong_specification_block__schema_yml, - test_context_where_subq_models__schema_yml, - test_context_where_subq_models__model_a_sql, - test_utils__dbt_project_yml, - test_utils__macros__current_timestamp_sql, - test_utils__macros__custom_test_sql, - local_dependency__dbt_project_yml, - local_dependency__macros__equality_sql, - case_sensitive_models__schema_yml, + all_quotes_schema__schema_yml, + alt_local_utils__macros__type_timestamp_sql, case_sensitive_models__lowercase_sql, - test_context_macros__my_test_sql, - test_context_macros__test_my_datediff_sql, - 
test_context_macros__custom_schema_tests_sql, - test_context_models_namespaced__schema_yml, - test_context_models_namespaced__model_c_sql, - test_context_models_namespaced__model_b_sql, - test_context_models_namespaced__model_a_sql, - macros_v2__override_get_test_macros_fail__get_test_sql_sql, - macros_v2__macros__tests_sql, - macros_v2__custom_configs__test_sql, - macros_v2__override_get_test_macros__get_test_sql_sql, - test_context_macros_namespaced__my_test_sql, - test_context_macros_namespaced__custom_schema_tests_sql, - seeds__some_seed_csv, - test_context_models__schema_yml, - test_context_models__model_c_sql, - test_context_models__model_b_sql, - test_context_models__model_a_sql, - name_collision__schema_yml, - name_collision__base_sql, - name_collision__base_extension_sql, - dupe_generic_tests_collide__schema_yml, - dupe_generic_tests_collide__model_a, - custom_generic_test_config_custom_macro__schema_yml, + case_sensitive_models__schema_yml, + case_sensitive_models__uppercase_SQL, custom_generic_test_config_custom_macro__model_a, - custom_generic_test_names__schema_yml, + custom_generic_test_config_custom_macro__schema_yml, custom_generic_test_names__model_a, - custom_generic_test_names_alt_format__schema_yml, + custom_generic_test_names__schema_yml, custom_generic_test_names_alt_format__model_a, - test_context_where_subq_macros__custom_generic_test_sql, - invalid_schema_models__schema_yml, + custom_generic_test_names_alt_format__schema_yml, + dupe_generic_tests_collide__model_a, + dupe_generic_tests_collide__schema_yml, + ephemeral__ephemeral_sql, + ephemeral__schema_yml, invalid_schema_models__model_sql, + invalid_schema_models__schema_yml, + local_dependency__dbt_project_yml, + local_dependency__macros__equality_sql, + local_utils__dbt_project_yml, + local_utils__macros__current_timestamp_sql, + local_utils__macros__custom_test_sql, + local_utils__macros__datediff_sql, + macro_resolution_order_macros__my_custom_test_sql, + 
macro_resolution_order_models__config_yml, + macro_resolution_order_models__my_model_sql, + macros_v2__custom_configs__test_sql, + macros_v2__macros__tests_sql, + macros_v2__override_get_test_macros__get_test_sql_sql, + macros_v2__override_get_test_macros_fail__get_test_sql_sql, + models_v2__custom__schema_yml, + models_v2__custom__table_copy_sql, + models_v2__custom_configs__schema_yml, + models_v2__custom_configs__table_copy_another_one_sql, + models_v2__custom_configs__table_copy_sql, + models_v2__custom_configs__table_copy_with_dots_sql, + models_v2__limit_null__schema_yml, + models_v2__limit_null__table_failure_limit_null_sql, + models_v2__limit_null__table_limit_null_sql, + models_v2__limit_null__table_warning_limit_null_sql, + models_v2__malformed__schema_yml, + models_v2__malformed__table_copy_sql, + models_v2__malformed__table_summary_sql, models_v2__models__schema_yml, - models_v2__models__table_summary_sql, - models_v2__models__table_failure_summary_sql, + models_v2__models__table_copy_sql, models_v2__models__table_disabled_sql, - models_v2__models__table_failure_null_relation_sql, models_v2__models__table_failure_copy_sql, - models_v2__models__table_copy_sql, - models_v2__limit_null__schema_yml, - models_v2__limit_null__table_warning_limit_null_sql, - models_v2__limit_null__table_limit_null_sql, - models_v2__limit_null__table_failure_limit_null_sql, - models_v2__override_get_test_models__schema_yml, - models_v2__override_get_test_models__my_model_warning_sql, - models_v2__override_get_test_models__my_model_pass_sql, + models_v2__models__table_failure_null_relation_sql, + models_v2__models__table_failure_summary_sql, + models_v2__models__table_summary_sql, models_v2__override_get_test_models__my_model_failure_sql, - models_v2__override_get_test_models_fail__schema_yml, + models_v2__override_get_test_models__my_model_pass_sql, + models_v2__override_get_test_models__my_model_warning_sql, + models_v2__override_get_test_models__schema_yml, 
models_v2__override_get_test_models_fail__my_model_sql, - models_v2__malformed__schema_yml, - models_v2__malformed__table_summary_sql, - models_v2__malformed__table_copy_sql, - models_v2__custom_configs__schema_yml, - models_v2__custom_configs__table_copy_another_one_sql, - models_v2__custom_configs__table_copy_sql, - models_v2__custom_configs__table_copy_with_dots_sql, - models_v2__custom__schema_yml, - models_v2__custom__table_copy_sql, - models_v2__render_test_cli_arg_models__schema_yml, + models_v2__override_get_test_models_fail__schema_yml, models_v2__render_test_cli_arg_models__model_sql, - models_v2__render_test_configured_arg_models__schema_yml, + models_v2__render_test_cli_arg_models__schema_yml, models_v2__render_test_configured_arg_models__model_sql, - local_utils__dbt_project_yml, - local_utils__macros__datediff_sql, - local_utils__macros__current_timestamp_sql, - local_utils__macros__custom_test_sql, - ephemeral__schema_yml, - ephemeral__ephemeral_sql, - quote_required_models__schema_yml, + models_v2__render_test_configured_arg_models__schema_yml, + name_collision__base_extension_sql, + name_collision__base_sql, + name_collision__schema_yml, quote_required_models__model_again_sql, quote_required_models__model_noquote_sql, quote_required_models__model_sql, - case_sensitive_models__uppercase_SQL, - macro_resolution_order_macros__my_custom_test_sql, - macro_resolution_order_models__config_yml, - macro_resolution_order_models__my_model_sql, - alt_local_utils__macros__type_timestamp_sql, - all_quotes_schema__schema_yml, + quote_required_models__schema_yml, + seeds__some_seed_csv, + test_context_macros__custom_schema_tests_sql, + test_context_macros__my_test_sql, + test_context_macros__test_my_datediff_sql, + test_context_macros_namespaced__custom_schema_tests_sql, + test_context_macros_namespaced__my_test_sql, + test_context_models__model_a_sql, + test_context_models__model_b_sql, + test_context_models__model_c_sql, + test_context_models__schema_yml, + 
test_context_models_namespaced__model_a_sql, + test_context_models_namespaced__model_b_sql, + test_context_models_namespaced__model_c_sql, + test_context_models_namespaced__schema_yml, + test_context_where_subq_macros__custom_generic_test_sql, + test_context_where_subq_models__model_a_sql, + test_context_where_subq_models__schema_yml, + test_utils__dbt_project_yml, + test_utils__macros__current_timestamp_sql, + test_utils__macros__custom_test_sql, + wrong_specification_block__schema_yml, ) -from dbt.exceptions import ParsingError, CompilationError, DuplicateResourceNameError -from dbt.contracts.results import TestStatus class TestSchemaTests: diff --git a/tests/functional/selected_resources/test_selected_resources.py b/tests/functional/selected_resources/test_selected_resources.py index 550db700af7..25025c56286 100644 --- a/tests/functional/selected_resources/test_selected_resources.py +++ b/tests/functional/selected_resources/test_selected_resources.py @@ -1,10 +1,11 @@ import pytest + from dbt.tests.util import run_dbt from tests.functional.selected_resources.fixtures import ( - on_run_start_macro_assert_selected_models_expected_list, my_model1, my_model2, my_snapshot, + on_run_start_macro_assert_selected_models_expected_list, ) diff --git a/tests/functional/selectors/test_default_selectors.py b/tests/functional/selectors/test_default_selectors.py index 3be42bea132..fac60af8545 100644 --- a/tests/functional/selectors/test_default_selectors.py +++ b/tests/functional/selectors/test_default_selectors.py @@ -1,4 +1,5 @@ import pytest + from dbt.tests.util import run_dbt models__schema_yml = """ diff --git a/tests/functional/semantic_models/test_semantic_model_configs.py b/tests/functional/semantic_models/test_semantic_model_configs.py index 407fb2c3d4d..cac38e92bd3 100644 --- a/tests/functional/semantic_models/test_semantic_model_configs.py +++ b/tests/functional/semantic_models/test_semantic_model_configs.py @@ -1,17 +1,17 @@ import pytest + from 
dbt.artifacts.resources import SemanticModelConfig from dbt.exceptions import ParsingError -from dbt.tests.util import run_dbt, update_config_file, get_manifest - +from dbt.tests.util import get_manifest, run_dbt, update_config_file from tests.functional.semantic_models.fixtures import ( - models_people_sql, - metricflow_time_spine_sql, - semantic_model_people_yml, disabled_models_people_metrics_yml, - models_people_metrics_yml, disabled_semantic_model_people_yml, enabled_semantic_model_people_yml, groups_yml, + metricflow_time_spine_sql, + models_people_metrics_yml, + models_people_sql, + semantic_model_people_yml, ) diff --git a/tests/functional/semantic_models/test_semantic_model_parsing.py b/tests/functional/semantic_models/test_semantic_model_parsing.py index c5e4d2bb00d..0e8b6d7b5e4 100644 --- a/tests/functional/semantic_models/test_semantic_model_parsing.py +++ b/tests/functional/semantic_models/test_semantic_model_parsing.py @@ -1,18 +1,16 @@ from typing import List import pytest - from dbt_semantic_interfaces.type_enums.time_granularity import TimeGranularity from dbt.contracts.graph.manifest import Manifest -from dbt_common.events.base_types import BaseEvent from dbt.tests.util import write_file +from dbt_common.events.base_types import BaseEvent from tests.functional.assertions.test_runner import dbtTestRunner - from tests.functional.semantic_models.fixtures import ( - schema_without_semantic_model_yml, fct_revenue_sql, metricflow_time_spine_sql, + schema_without_semantic_model_yml, schema_yml, ) diff --git a/tests/functional/semantic_models/test_semantic_models.py b/tests/functional/semantic_models/test_semantic_models.py index 945b7fdc04b..f773a4f8c5e 100644 --- a/tests/functional/semantic_models/test_semantic_models.py +++ b/tests/functional/semantic_models/test_semantic_models.py @@ -2,16 +2,15 @@ from dbt.contracts.graph.manifest import Manifest from dbt.exceptions import CompilationError -from dbt.tests.util import run_dbt -from dbt.tests.util 
import write_file +from dbt.tests.util import run_dbt, write_file from tests.functional.semantic_models.fixtures import ( - models_people_sql, - simple_metricflow_time_spine_sql, - semantic_model_people_yml, models_people_metrics_yml, + models_people_sql, + semantic_model_descriptions, semantic_model_people_diff_name_yml, + semantic_model_people_yml, semantic_model_people_yml_with_docs, - semantic_model_descriptions, + simple_metricflow_time_spine_sql, ) diff --git a/tests/functional/show/test_show.py b/tests/functional/show/test_show.py index 5c5ac51f9b1..b1aa16210b8 100644 --- a/tests/functional/show/test_show.py +++ b/tests/functional/show/test_show.py @@ -1,17 +1,18 @@ import pytest -from dbt_common.exceptions import DbtRuntimeError, DbtBaseException as DbtException -from dbt.tests.util import run_dbt_and_capture, run_dbt +from dbt.tests.util import run_dbt, run_dbt_and_capture +from dbt_common.exceptions import DbtBaseException as DbtException +from dbt_common.exceptions import DbtRuntimeError from tests.functional.show.fixtures import ( - models__second_ephemeral_model, - seeds__sample_seed, + models__ephemeral_model, models__sample_model, models__sample_number_model, models__sample_number_model_with_nulls, + models__second_ephemeral_model, models__second_model, - models__ephemeral_model, - schema_yml, private_model_yml, + schema_yml, + seeds__sample_seed, ) diff --git a/tests/functional/simple_snapshot/test_basic_snapshot.py b/tests/functional/simple_snapshot/test_basic_snapshot.py index ff4799f10ab..b1a290f6b99 100644 --- a/tests/functional/simple_snapshot/test_basic_snapshot.py +++ b/tests/functional/simple_snapshot/test_basic_snapshot.py @@ -1,20 +1,27 @@ import os from datetime import datetime -import pytz + import pytest -from dbt.tests.util import run_dbt, check_relations_equal, relation_from_name, write_file +import pytz + +from dbt.tests.util import ( + check_relations_equal, + relation_from_name, + run_dbt, + write_file, +) from 
tests.functional.simple_snapshot.fixtures import ( - models__schema_yml, - models__schema_with_target_schema_yml, + macros__test_no_overlaps_sql, + macros_custom_snapshot__custom_sql, models__ref_snapshot_sql, - seeds__seed_newcol_csv, + models__schema_with_target_schema_yml, + models__schema_yml, seeds__seed_csv, - snapshots_pg__snapshot_sql, + seeds__seed_newcol_csv, snapshots_pg__snapshot_no_target_schema_sql, - macros__test_no_overlaps_sql, - macros_custom_snapshot__custom_sql, - snapshots_pg_custom_namespaced__snapshot_sql, + snapshots_pg__snapshot_sql, snapshots_pg_custom__snapshot_sql, + snapshots_pg_custom_namespaced__snapshot_sql, ) snapshots_check_col__snapshot_sql = """ diff --git a/tests/functional/simple_snapshot/test_changing_check_cols_snapshot.py b/tests/functional/simple_snapshot/test_changing_check_cols_snapshot.py index 0aee4aedb99..fa5edb51410 100644 --- a/tests/functional/simple_snapshot/test_changing_check_cols_snapshot.py +++ b/tests/functional/simple_snapshot/test_changing_check_cols_snapshot.py @@ -1,5 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal + +from dbt.tests.util import check_relations_equal, run_dbt snapshot_sql = """ {% snapshot snapshot_check_cols_new_column %} diff --git a/tests/functional/simple_snapshot/test_changing_strategy_snapshot.py b/tests/functional/simple_snapshot/test_changing_strategy_snapshot.py index 73afa1fc244..b553485e674 100644 --- a/tests/functional/simple_snapshot/test_changing_strategy_snapshot.py +++ b/tests/functional/simple_snapshot/test_changing_strategy_snapshot.py @@ -1,8 +1,8 @@ import pytest + from dbt.tests.util import run_dbt from tests.functional.simple_snapshot.fixtures import models_slow__gen_sql - test_snapshots_changing_strategy__test_snapshot_sql = """ {# /* diff --git a/tests/functional/simple_snapshot/test_check_cols_snapshot.py b/tests/functional/simple_snapshot/test_check_cols_snapshot.py index bb32f27721c..8ee2817c45d 100644 --- 
a/tests/functional/simple_snapshot/test_check_cols_snapshot.py +++ b/tests/functional/simple_snapshot/test_check_cols_snapshot.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt +from dbt.tests.util import run_dbt snapshot_sql = """ {% snapshot check_cols_cycle %} diff --git a/tests/functional/simple_snapshot/test_check_cols_updated_at_snapshot.py b/tests/functional/simple_snapshot/test_check_cols_updated_at_snapshot.py index c72fb1a1c73..31265c4121f 100644 --- a/tests/functional/simple_snapshot/test_check_cols_updated_at_snapshot.py +++ b/tests/functional/simple_snapshot/test_check_cols_updated_at_snapshot.py @@ -1,5 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal + +from dbt.tests.util import check_relations_equal, run_dbt snapshot_sql = """ {% snapshot snapshot_check_cols_updated_at_actual %} diff --git a/tests/functional/simple_snapshot/test_comment_ending_snapshot.py b/tests/functional/simple_snapshot/test_comment_ending_snapshot.py index 257afb042be..c0d482d9b7d 100644 --- a/tests/functional/simple_snapshot/test_comment_ending_snapshot.py +++ b/tests/functional/simple_snapshot/test_comment_ending_snapshot.py @@ -1,6 +1,8 @@ +import os + import pytest + from dbt.tests.util import run_dbt -import os snapshots_with_comment_at_end__snapshot_sql = """ {% snapshot snapshot_actual %} diff --git a/tests/functional/simple_snapshot/test_cross_schema_snapshot.py b/tests/functional/simple_snapshot/test_cross_schema_snapshot.py index 874f4ef2007..562bb6e0c61 100644 --- a/tests/functional/simple_snapshot/test_cross_schema_snapshot.py +++ b/tests/functional/simple_snapshot/test_cross_schema_snapshot.py @@ -1,14 +1,15 @@ import os + import pytest + from dbt.tests.util import run_dbt from tests.functional.simple_snapshot.fixtures import ( - models__schema_yml, - models__ref_snapshot_sql, macros__test_no_overlaps_sql, + models__ref_snapshot_sql, + models__schema_yml, snapshots_pg__snapshot_sql, ) - NUM_SNAPSHOT_MODELS = 1 diff 
--git a/tests/functional/simple_snapshot/test_hard_delete_snapshot.py b/tests/functional/simple_snapshot/test_hard_delete_snapshot.py index 53c61bb8c7f..befd06097f4 100644 --- a/tests/functional/simple_snapshot/test_hard_delete_snapshot.py +++ b/tests/functional/simple_snapshot/test_hard_delete_snapshot.py @@ -1,17 +1,18 @@ import os from datetime import datetime, timedelta -import pytz + import pytest -from dbt.tests.util import run_dbt, check_relations_equal +import pytz + +from dbt.tests.util import check_relations_equal, run_dbt from tests.functional.simple_snapshot.fixtures import ( - models__schema_yml, - models__ref_snapshot_sql, macros__test_no_overlaps_sql, + models__ref_snapshot_sql, + models__schema_yml, snapshots_pg__snapshot_sql, ) from tests.functional.utils import is_aware - # These tests uses the same seed data, containing 20 records of which we hard delete the last 10. # These deleted records set the dbt_valid_to to time the snapshot was ran. diff --git a/tests/functional/simple_snapshot/test_invalid_namespace_snapshot.py b/tests/functional/simple_snapshot/test_invalid_namespace_snapshot.py index 40b91edf051..c4f6b88f247 100644 --- a/tests/functional/simple_snapshot/test_invalid_namespace_snapshot.py +++ b/tests/functional/simple_snapshot/test_invalid_namespace_snapshot.py @@ -1,13 +1,15 @@ import os + import pytest + from dbt.tests.util import run_dbt from tests.functional.simple_snapshot.fixtures import ( - models__schema_yml, - models__ref_snapshot_sql, macros__test_no_overlaps_sql, macros_custom_snapshot__custom_sql, - seeds__seed_newcol_csv, + models__ref_snapshot_sql, + models__schema_yml, seeds__seed_csv, + seeds__seed_newcol_csv, ) NUM_SNAPSHOT_MODELS = 1 diff --git a/tests/functional/simple_snapshot/test_long_text_snapshot.py b/tests/functional/simple_snapshot/test_long_text_snapshot.py index 332bc384f61..453c4164be3 100644 --- a/tests/functional/simple_snapshot/test_long_text_snapshot.py +++ 
b/tests/functional/simple_snapshot/test_long_text_snapshot.py @@ -1,9 +1,10 @@ import pytest + from dbt.tests.util import run_dbt from tests.functional.simple_snapshot.fixtures import ( - models__schema_yml, - models__ref_snapshot_sql, macros__test_no_overlaps_sql, + models__ref_snapshot_sql, + models__schema_yml, ) seed_longtext_sql = """ diff --git a/tests/functional/simple_snapshot/test_missing_strategy_snapshot.py b/tests/functional/simple_snapshot/test_missing_strategy_snapshot.py index dfb51f7992e..407cd15439f 100644 --- a/tests/functional/simple_snapshot/test_missing_strategy_snapshot.py +++ b/tests/functional/simple_snapshot/test_missing_strategy_snapshot.py @@ -1,10 +1,11 @@ import pytest -from dbt.tests.util import run_dbt + from dbt.exceptions import ParsingError +from dbt.tests.util import run_dbt from tests.functional.simple_snapshot.fixtures import ( - models__schema_yml, - models__ref_snapshot_sql, macros__test_no_overlaps_sql, + models__ref_snapshot_sql, + models__schema_yml, ) snapshots_invalid__snapshot_sql = """ diff --git a/tests/functional/simple_snapshot/test_renamed_source_snapshot.py b/tests/functional/simple_snapshot/test_renamed_source_snapshot.py index 6e71ce7cad5..e59f2192051 100644 --- a/tests/functional/simple_snapshot/test_renamed_source_snapshot.py +++ b/tests/functional/simple_snapshot/test_renamed_source_snapshot.py @@ -1,13 +1,13 @@ import pytest + from dbt.tests.util import run_dbt from tests.functional.simple_snapshot.fixtures import ( - seeds__seed_newcol_csv, - seeds__seed_csv, macros__test_no_overlaps_sql, macros_custom_snapshot__custom_sql, + seeds__seed_csv, + seeds__seed_newcol_csv, ) - snapshots_checkall__snapshot_sql = """ {% snapshot my_snapshot %} {{ config(check_cols='all', unique_key='id', strategy='check', target_database=database, target_schema=schema) }} diff --git a/tests/functional/simple_snapshot/test_select_exclude_snapshot.py b/tests/functional/simple_snapshot/test_select_exclude_snapshot.py index 
f763c838915..6055ebbff04 100644 --- a/tests/functional/simple_snapshot/test_select_exclude_snapshot.py +++ b/tests/functional/simple_snapshot/test_select_exclude_snapshot.py @@ -1,12 +1,14 @@ import os + import pytest -from dbt.tests.util import run_dbt, check_relations_equal, check_table_does_not_exist + +from dbt.tests.util import check_relations_equal, check_table_does_not_exist, run_dbt from tests.functional.simple_snapshot.fixtures import ( - seeds__seed_newcol_csv, - seeds__seed_csv, - models__schema_yml, - models__ref_snapshot_sql, macros__test_no_overlaps_sql, + models__ref_snapshot_sql, + models__schema_yml, + seeds__seed_csv, + seeds__seed_newcol_csv, snapshots_pg__snapshot_sql, snapshots_select__snapshot_sql, snapshots_select_noconfig__snapshot_sql, diff --git a/tests/functional/simple_snapshot/test_slow_query_snapshot.py b/tests/functional/simple_snapshot/test_slow_query_snapshot.py index 63dc939dfdd..19a3b83c0e7 100644 --- a/tests/functional/simple_snapshot/test_slow_query_snapshot.py +++ b/tests/functional/simple_snapshot/test_slow_query_snapshot.py @@ -1,4 +1,5 @@ import pytest + from dbt.tests.util import run_dbt from tests.functional.simple_snapshot.fixtures import models_slow__gen_sql diff --git a/tests/functional/source_overrides/fixtures.py b/tests/functional/source_overrides/fixtures.py index f7f49235d91..ca6d4a03e66 100644 --- a/tests/functional/source_overrides/fixtures.py +++ b/tests/functional/source_overrides/fixtures.py @@ -1,6 +1,5 @@ import pytest - dupe_models__schema2_yml = """ version: 2 sources: diff --git a/tests/functional/source_overrides/test_simple_source_override.py b/tests/functional/source_overrides/test_simple_source_override.py index da1b4856e32..5505f8ca9ba 100644 --- a/tests/functional/source_overrides/test_simple_source_override.py +++ b/tests/functional/source_overrides/test_simple_source_override.py @@ -1,8 +1,9 @@ from datetime import datetime, timedelta + import pytest -from dbt.tests.util import run_dbt, 
update_config_file, check_relations_equal from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import check_relations_equal, run_dbt, update_config_file from tests.functional.source_overrides.fixtures import ( # noqa: F401 local_dependency, models__schema_yml, diff --git a/tests/functional/source_overrides/test_source_overrides_duplicate_model.py b/tests/functional/source_overrides/test_source_overrides_duplicate_model.py index e3cdebe4794..b07b04a7f63 100644 --- a/tests/functional/source_overrides/test_source_overrides_duplicate_model.py +++ b/tests/functional/source_overrides/test_source_overrides_duplicate_model.py @@ -1,12 +1,13 @@ import os -from dbt.exceptions import CompilationError + import pytest -from dbt.tests.util import run_dbt +from dbt.exceptions import CompilationError from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import run_dbt from tests.functional.source_overrides.fixtures import ( # noqa: F401 - dupe_models__schema2_yml, dupe_models__schema1_yml, + dupe_models__schema2_yml, local_dependency, ) diff --git a/tests/functional/sources/common_source_setup.py b/tests/functional/sources/common_source_setup.py index c1e7f0a2568..8608754855d 100644 --- a/tests/functional/sources/common_source_setup.py +++ b/tests/functional/sources/common_source_setup.py @@ -1,19 +1,20 @@ import os + import pytest import yaml from dbt.tests.util import run_dbt, run_dbt_and_capture from tests.functional.sources.fixtures import ( - models_schema_yml, - models_view_model_sql, - models_ephemeral_model_sql, models_descendant_model_sql, + models_ephemeral_model_sql, models_multi_source_model_sql, models_nonsource_descendant_sql, - seeds_source_csv, - seeds_other_table_csv, + models_schema_yml, + models_view_model_sql, seeds_expected_multi_source_csv, seeds_other_source_table_csv, + seeds_other_table_csv, + seeds_source_csv, ) diff --git a/tests/functional/sources/test_simple_source.py 
b/tests/functional/sources/test_simple_source.py index cd08647f367..3d84dd8a167 100644 --- a/tests/functional/sources/test_simple_source.py +++ b/tests/functional/sources/test_simple_source.py @@ -1,23 +1,22 @@ import os + import pytest import yaml -from dbt.exceptions import ParsingError +from dbt.exceptions import ParsingError from dbt.tests.util import ( - run_dbt, - update_config_file, check_relations_equal, check_table_does_not_exist, + run_dbt, + update_config_file, ) -from tests.functional.sources.common_source_setup import ( - BaseSourcesTest, -) +from tests.functional.sources.common_source_setup import BaseSourcesTest from tests.functional.sources.fixtures import ( macros_macro_sql, - malformed_models_schema_yml, malformed_models_descendant_model_sql, - malformed_schema_tests_schema_yml, + malformed_models_schema_yml, malformed_schema_tests_model_sql, + malformed_schema_tests_schema_yml, ) diff --git a/tests/functional/sources/test_source_configs.py b/tests/functional/sources/test_source_configs.py index 1a967d69269..1ceca5d0522 100644 --- a/tests/functional/sources/test_source_configs.py +++ b/tests/functional/sources/test_source_configs.py @@ -1,15 +1,15 @@ import pytest from dbt.artifacts.resources import SourceConfig +from dbt.tests.util import get_manifest, run_dbt, update_config_file from dbt_common.dataclass_schema import ValidationError -from dbt.tests.util import run_dbt, update_config_file, get_manifest from tests.functional.sources.fixtures import ( - basic_source_schema_yml, - disabled_source_level_schema_yml, - disabled_source_table_schema_yml, all_configs_everywhere_schema_yml, all_configs_not_table_schema_yml, all_configs_project_source_schema_yml, + basic_source_schema_yml, + disabled_source_level_schema_yml, + disabled_source_table_schema_yml, invalid_config_source_schema_yml, ) diff --git a/tests/functional/sources/test_source_fresher_state.py b/tests/functional/sources/test_source_fresher_state.py index 95c048c769f..e1756b220e0 100644 
--- a/tests/functional/sources/test_source_fresher_state.py +++ b/tests/functional/sources/test_source_fresher_state.py @@ -1,22 +1,20 @@ -import os import json +import os import shutil -import pytest from datetime import datetime, timedelta -from dbt_common.exceptions import DbtInternalError - +import pytest -from dbt.tests.util import AnyStringWith, AnyFloat import dbt.version +from dbt.contracts.results import FreshnessExecutionResultArtifact +from dbt.tests.util import AnyFloat, AnyStringWith +from dbt_common.exceptions import DbtInternalError from tests.functional.sources.common_source_setup import BaseSourcesTest - from tests.functional.sources.fixtures import ( error_models_schema_yml, - models_newly_added_model_sql, models_newly_added_error_model_sql, + models_newly_added_model_sql, ) -from dbt.contracts.results import FreshnessExecutionResultArtifact # TODO: We may create utility classes to handle reusable fixtures. @@ -626,7 +624,7 @@ def test_intentional_failure_no_previous_state(self, project): with pytest.raises(DbtInternalError) as excinfo: self.run_dbt_with_vars( project, - ["run", "-s", "source_status:fresher", "--defer", "--state", "previous_state"], + ["run", "-s", "source_status:fresher", "--state", "previous_state"], ) assert "No previous state comparison freshness results in sources.json" in str( excinfo.value diff --git a/tests/functional/sources/test_source_freshness.py b/tests/functional/sources/test_source_freshness.py index 0e58b33b555..565b93a81e6 100644 --- a/tests/functional/sources/test_source_freshness.py +++ b/tests/functional/sources/test_source_freshness.py @@ -1,23 +1,25 @@ -import os import json -import pytest +import os from datetime import datetime, timedelta + +import pytest import yaml import dbt.version +from dbt import deprecations from dbt.artifacts.schemas.freshness import FreshnessResult from dbt.artifacts.schemas.results import FreshnessStatus from dbt.cli.main import dbtRunner +from dbt.tests.util import AnyFloat, 
AnyStringWith from tests.functional.sources.common_source_setup import BaseSourcesTest from tests.functional.sources.fixtures import ( - error_models_schema_yml, + collect_freshness_macro_override_previous_return_signature, error_models_model_sql, + error_models_schema_yml, filtered_models_schema_yml, - override_freshness_models_schema_yml, - collect_freshness_macro_override_previous_return_signature, freshness_via_metadata_schema_yml, + override_freshness_models_schema_yml, ) -from dbt.tests.util import AnyStringWith, AnyFloat class SuccessfulSourceFreshnessTest(BaseSourcesTest): @@ -414,6 +416,38 @@ def test_metadata_freshness_unsupported_error_when_run(self, project): assert "Could not compute freshness for source test_table" in freshness_result.message +class TestSourceFreshnessProjectHooksNotRun(SuccessfulSourceFreshnessTest): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "on-run-start": ["{{ log('on-run-start hooks called') }}"], + "on-run-end": ["{{ log('on-run-end hooks called') }}"], + "flags": { + "source_freshness_run_project_hooks": False, + }, + } + + def test_hooks_do_run_for_source_freshness( + self, + project, + ): + deprecations.reset_deprecations() + assert deprecations.active_deprecations == set() + _, log_output = self.run_dbt_and_capture_with_vars( + project, + [ + "source", + "freshness", + ], + expect_pass=False, + ) + assert "on-run-start hooks called" not in log_output + assert "on-run-end hooks called" not in log_output + expected = {"source-freshness-project-hooks"} + assert expected == deprecations.active_deprecations + + class TestHooksInSourceFreshness(SuccessfulSourceFreshnessTest): @pytest.fixture(scope="class") def project_config_update(self): diff --git a/tests/functional/sources/test_source_loaded_at_field.py b/tests/functional/sources/test_source_loaded_at_field.py new file mode 100644 index 00000000000..b89b8ddd372 --- /dev/null +++ 
b/tests/functional/sources/test_source_loaded_at_field.py @@ -0,0 +1,136 @@ +import pytest + +from dbt.exceptions import YamlParseDictError +from dbt.tests.util import get_manifest, run_dbt, write_file + +loaded_at_field_null_schema_yml = """ +sources: + - name: test_source + freshness: + warn_after: + count: 1 + period: day + error_after: + count: 4 + period: day + loaded_at_field: updated_at + tables: + - name: table1 + loaded_at_field: null +""" + +loaded_at_field_blank_schema_yml = """ +sources: + - name: test_source + freshness: + warn_after: + count: 1 + period: day + error_after: + count: 4 + period: day + loaded_at_field: updated_at + tables: + - name: table1 + loaded_at_field: null +""" + +loaded_at_field_missing_schema_yml = """ +sources: + - name: test_source + freshness: + warn_after: + count: 1 + period: day + error_after: + count: 4 + period: day + loaded_at_field: updated_at + tables: + - name: table1 +""" + +loaded_at_field_defined_schema_yml = """ +sources: + - name: test_source + freshness: + warn_after: + count: 1 + period: day + error_after: + count: 4 + period: day + loaded_at_field: updated_at + tables: + - name: table1 + loaded_at_field: updated_at_another_place +""" + +loaded_at_field_empty_string_schema_yml = """ +sources: + - name: test_source + freshness: + warn_after: + count: 1 + period: day + error_after: + count: 4 + period: day + loaded_at_field: updated_at + tables: + - name: table1 + loaded_at_field: "" +""" + + +class TestParsingLoadedAtField: + @pytest.fixture(scope="class") + def models(self): + return {"schema.yml": loaded_at_field_null_schema_yml} + + def test_loaded_at_field(self, project): + # test setting loaded_at_field to null explicitly at table level + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + + assert "source.test.test_source.table1" in manifest.sources + assert manifest.sources.get("source.test.test_source.table1").loaded_at_field is None + + # test setting loaded_at_field at source level, 
do not set at table level + # end up with source level loaded_at_field + write_file( + loaded_at_field_missing_schema_yml, project.project_root, "models", "schema.yml" + ) + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "source.test.test_source.table1" in manifest.sources + assert ( + manifest.sources.get("source.test.test_source.table1").loaded_at_field == "updated_at" + ) + + # test setting loaded_at_field to nothing, should override Source value for None + write_file(loaded_at_field_blank_schema_yml, project.project_root, "models", "schema.yml") + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + + assert "source.test.test_source.table1" in manifest.sources + assert manifest.sources.get("source.test.test_source.table1").loaded_at_field is None + + # test setting loaded_at_field at table level to a value - it should override source level + write_file( + loaded_at_field_defined_schema_yml, project.project_root, "models", "schema.yml" + ) + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "source.test.test_source.table1" in manifest.sources + assert ( + manifest.sources.get("source.test.test_source.table1").loaded_at_field + == "updated_at_another_place" + ) + + # test setting loaded_at_field at table level to an empty string - should error + write_file( + loaded_at_field_empty_string_schema_yml, project.project_root, "models", "schema.yml" + ) + with pytest.raises(YamlParseDictError): + run_dbt(["parse"]) diff --git a/tests/functional/statements/test_statements.py b/tests/functional/statements/test_statements.py index 87933af20fa..9e3d5005759 100644 --- a/tests/functional/statements/test_statements.py +++ b/tests/functional/statements/test_statements.py @@ -1,7 +1,8 @@ import pathlib + import pytest -from dbt.tests.util import run_dbt, check_relations_equal, write_file +from dbt.tests.util import check_relations_equal, run_dbt, write_file from tests.functional.statements.fixtures import ( 
models__statement_actual, models__statement_duplicated_load, diff --git a/tests/functional/adapter/empty/test_empty.py b/tests/functional/test_empty.py similarity index 77% rename from tests/functional/adapter/empty/test_empty.py rename to tests/functional/test_empty.py index a014a640c1f..d284fdcc3e5 100644 --- a/tests/functional/adapter/empty/test_empty.py +++ b/tests/functional/test_empty.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, relation_from_name +from dbt.tests.util import relation_from_name, run_dbt model_input_sql = """ select 1 as id @@ -37,7 +37,7 @@ """ -class BaseTestEmpty: +class TestEmptyFlag: @pytest.fixture(scope="class") def seeds(self): return { @@ -70,6 +70,13 @@ def test_run_with_empty(self, project): run_dbt(["run", "--empty"]) self.assert_row_count(project, "model", 0) + # build without empty - 3 expected rows in output - 1 from each input + run_dbt(["build"]) + self.assert_row_count(project, "model", 3) + + # build with empty - 0 expected rows in output + run_dbt(["build", "--empty"]) + self.assert_row_count(project, "model", 0) -class TestEmpty(BaseTestEmpty): - pass + # ensure dbt compile supports --empty flag + run_dbt(["compile", "--empty"]) diff --git a/tests/functional/test_selection/fixtures.py b/tests/functional/test_selection/fixtures.py index af591604299..37cce1bbae9 100644 --- a/tests/functional/test_selection/fixtures.py +++ b/tests/functional/test_selection/fixtures.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.fixtures.project import write_project_files +from dbt.tests.fixtures.project import write_project_files tests__cf_a_b_sql = """ select * from {{ ref('model_a') }} diff --git a/tests/functional/test_selection/test_selection_expansion.py b/tests/functional/test_selection/test_selection_expansion.py index 3e6f1f82ae3..e42d6196328 100644 --- a/tests/functional/test_selection/test_selection_expansion.py +++ b/tests/functional/test_selection/test_selection_expansion.py @@ -2,9 +2,9 @@ from 
dbt.tests.util import run_dbt from tests.functional.test_selection.fixtures import ( # noqa: F401 - tests, models, project_files, + tests, ) diff --git a/tests/functional/threading/test_thread_count.py b/tests/functional/threading/test_thread_count.py index c31f5ed6312..9c94356e630 100644 --- a/tests/functional/threading/test_thread_count.py +++ b/tests/functional/threading/test_thread_count.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt +from dbt.tests.util import run_dbt models__do_nothing__sql = """ with x as (select pg_sleep(1)) select 1 diff --git a/tests/functional/timezones/test_timezones.py b/tests/functional/timezones/test_timezones.py index a4627fb63c3..1e2de2585f3 100644 --- a/tests/functional/timezones/test_timezones.py +++ b/tests/functional/timezones/test_timezones.py @@ -1,9 +1,9 @@ import os + import pytest from dbt.tests.util import run_dbt - # Canada/Saskatchewan does not observe DST so the time diff won't change depending on when it is in the year model_sql = """ {{ diff --git a/tests/functional/unit_testing/fixtures.py b/tests/functional/unit_testing/fixtures.py index 54f0497250a..3028e0bc1e6 100644 --- a/tests/functional/unit_testing/fixtures.py +++ b/tests/functional/unit_testing/fixtures.py @@ -41,6 +41,15 @@ 'b' as string_b """ +my_model_check_null_sql = """ +SELECT +CASE + WHEN a IS null THEN True + ELSE False +END a_is_null +FROM {{ ref('my_model_a') }} +""" + test_my_model_yml = """ unit_tests: - name: test_my_model @@ -507,6 +516,11 @@ 1,a """ +test_my_model_a_with_null_fixture_csv = """id,a +1, +2,3 +""" + test_my_model_a_empty_fixture_csv = """ """ @@ -1041,3 +1055,58 @@ def external_package(): "external_model.sql": external_package__external_model_sql, }, } + + +model_select_1_sql = """ +select 1 as id +""" + +model_select_2_sql = """ +select 2 as id +""" + +test_expect_2_yml = """ +unit_tests: + - name: test_my_model + model: my_model + given: [] + expect: + rows: + - {id: 2} +""" + + +test_my_model_csv_null_yml 
= """ +unit_tests: + - name: test_my_model_check_null + model: my_model_check_null + given: + - input: ref('my_model_a') + format: csv + rows: | + id,a + 1, + 2,3 + expect: + format: csv + rows: | + a_is_null + True + False +""" + +test_my_model_file_csv_null_yml = """ +unit_tests: + - name: test_my_model_check_null + model: my_model_check_null + given: + - input: ref('my_model_a') + format: csv + fixture: test_my_model_a_with_null_fixture + expect: + format: csv + rows: | + a_is_null + True + False +""" diff --git a/tests/functional/unit_testing/test_csv_fixtures.py b/tests/functional/unit_testing/test_csv_fixtures.py index 6aae95abed6..11a910edfb6 100644 --- a/tests/functional/unit_testing/test_csv_fixtures.py +++ b/tests/functional/unit_testing/test_csv_fixtures.py @@ -1,27 +1,32 @@ import pytest -from dbt.exceptions import ParsingError, YamlParseDictError, DuplicateResourceNameError -from dbt.tests.util import run_dbt, write_file, rm_file from fixtures import ( - my_model_sql, - my_model_a_sql, - my_model_b_sql, - test_my_model_csv_yml, datetime_test, - datetime_test_invalid_format_key, datetime_test_invalid_csv_values, - test_my_model_file_csv_yml, - test_my_model_fixture_csv, + datetime_test_invalid_format_key, + my_model_a_sql, + my_model_b_sql, + my_model_check_null_sql, + my_model_sql, + test_my_model_a_empty_fixture_csv, test_my_model_a_fixture_csv, + test_my_model_a_numeric_fixture_csv, + test_my_model_a_with_null_fixture_csv, test_my_model_b_fixture_csv, test_my_model_basic_fixture_csv, - test_my_model_a_numeric_fixture_csv, - test_my_model_a_empty_fixture_csv, test_my_model_concat_fixture_csv, - test_my_model_mixed_csv_yml, - test_my_model_missing_csv_yml, + test_my_model_csv_null_yml, + test_my_model_csv_yml, test_my_model_duplicate_csv_yml, + test_my_model_file_csv_null_yml, + test_my_model_file_csv_yml, + test_my_model_fixture_csv, + test_my_model_missing_csv_yml, + test_my_model_mixed_csv_yml, ) +from dbt.exceptions import 
DuplicateResourceNameError, ParsingError, YamlParseDictError +from dbt.tests.util import rm_file, run_dbt, write_file + class TestUnitTestsWithInlineCSV: @pytest.fixture(scope="class") @@ -207,6 +212,50 @@ def test_unit_test(self, project): results = run_dbt(["test", "--select", "my_model"], expect_pass=False) +class TestUnitTestsInlineCSVEmptyValueIsNull: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model_a.sql": my_model_a_sql, + "my_model_check_null.sql": my_model_check_null_sql, + "test_my_model_csv_null.yml": test_my_model_csv_null_yml, + } + + def test_unit_test(self, project): + results = run_dbt(["run"]) + assert len(results) == 2 + + # Select by model name + results = run_dbt(["test", "--select", "my_model_check_null"], expect_pass=True) + assert len(results) == 1 + + +class TestUnitTestsFileCSVEmptyValueIsNull: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model_a.sql": my_model_a_sql, + "my_model_check_null.sql": my_model_check_null_sql, + "test_my_model_file_csv_null.yml": test_my_model_file_csv_null_yml, + } + + @pytest.fixture(scope="class") + def tests(self): + return { + "fixtures": { + "test_my_model_a_with_null_fixture.csv": test_my_model_a_with_null_fixture_csv, + } + } + + def test_unit_test(self, project): + results = run_dbt(["run"]) + assert len(results) == 2 + + # Select by model name + results = run_dbt(["test", "--select", "my_model_check_null"], expect_pass=True) + assert len(results) == 1 + + class TestUnitTestsMissingCSVFile: @pytest.fixture(scope="class") def models(self): diff --git a/tests/functional/unit_testing/test_sql_format.py b/tests/functional/unit_testing/test_sql_format.py index 6b5af93e1ba..700008eaf9c 100644 --- a/tests/functional/unit_testing/test_sql_format.py +++ b/tests/functional/unit_testing/test_sql_format.py @@ -1,4 +1,5 @@ import pytest + from dbt.tests.util import run_dbt wizards_csv = """id,w_name,email,email_tld,phone,world diff --git 
a/tests/functional/unit_testing/test_state.py b/tests/functional/unit_testing/test_state.py index 1d56fa8c221..cf87f49b406 100644 --- a/tests/functional/unit_testing/test_state.py +++ b/tests/functional/unit_testing/test_state.py @@ -1,21 +1,20 @@ import os -import pytest import shutil from copy import deepcopy -from dbt.tests.util import ( - run_dbt, - write_file, - write_config_file, -) +import pytest from fixtures import ( - my_model_vars_sql, + model_select_1_sql, + model_select_2_sql, my_model_a_sql, my_model_b_sql, - test_my_model_simple_fixture_yml, - test_my_model_fixture_csv, - test_my_model_b_fixture_csv as test_my_model_fixture_csv_modified, + my_model_vars_sql, + test_expect_2_yml, ) +from fixtures import test_my_model_b_fixture_csv as test_my_model_fixture_csv_modified +from fixtures import test_my_model_fixture_csv, test_my_model_simple_fixture_yml + +from dbt.tests.util import run_dbt, write_config_file, write_file class UnitTestState: @@ -131,3 +130,22 @@ def test_unit_test_defer_state(self, project): results = run_dbt(["test", "--defer", "--state", "state"], expect_pass=False) assert len(results) == 4 assert sorted([r.status for r in results]) == ["fail", "pass", "pass", "pass"] + + +class TestUnitTestDeferDoesntOverwrite(UnitTestState): + @pytest.fixture(scope="class") + def models(self): + return {"my_model.sql": model_select_1_sql, "test_my_model.yml": test_expect_2_yml} + + def test_unit_test_defer_state(self, project): + run_dbt(["test"], expect_pass=False) + self.copy_state(project.project_root) + write_file( + model_select_2_sql, + project.project_root, + "models", + "my_model.sql", + ) + results = run_dbt(["test", "--defer", "--state", "state"]) + assert len(results) == 1 + assert sorted([r.status for r in results]) == ["pass"] diff --git a/tests/functional/unit_testing/test_unit_testing.py b/tests/functional/unit_testing/test_unit_testing.py index 887c1907e76..7332ddccb64 100644 --- a/tests/functional/unit_testing/test_unit_testing.py +++ 
b/tests/functional/unit_testing/test_unit_testing.py @@ -1,37 +1,39 @@ -import pytest import os from unittest import mock -from dbt.tests.util import ( - run_dbt, - write_file, - get_manifest, - run_dbt_and_capture, - read_file, - file_exists, -) -from dbt.contracts.results import NodeStatus -from dbt.exceptions import DuplicateResourceNameError, ParsingError -from dbt.plugins.manifest import PluginNodes, ModelNodeArgs -from dbt.tests.fixtures.project import write_project_files + +import pytest from fixtures import ( # noqa: F401 - my_model_sql, - my_model_vars_sql, - my_model_a_sql, - my_model_b_sql, - test_my_model_yml, datetime_test, - my_incremental_model_sql, event_sql, + external_package, + external_package__accounts_seed_csv, + my_incremental_model_sql, + my_model_a_sql, + my_model_b_sql, + my_model_sql, + my_model_vars_sql, test_my_model_incremental_yml_basic, + test_my_model_incremental_yml_no_override, + test_my_model_incremental_yml_no_this_input, + test_my_model_incremental_yml_wrong_override, + test_my_model_yml, test_my_model_yml_invalid, test_my_model_yml_invalid_ref, - valid_emails_sql, top_level_domains_sql, - external_package__accounts_seed_csv, - external_package, - test_my_model_incremental_yml_no_override, - test_my_model_incremental_yml_wrong_override, - test_my_model_incremental_yml_no_this_input, + valid_emails_sql, +) + +from dbt.contracts.results import NodeStatus +from dbt.exceptions import DuplicateResourceNameError, ParsingError +from dbt.plugins.manifest import ModelNodeArgs, PluginNodes +from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import ( + file_exists, + get_manifest, + read_file, + run_dbt, + run_dbt_and_capture, + write_file, ) from tests.unit.utils import normalize diff --git a/tests/functional/unit_testing/test_ut_dependency.py b/tests/functional/unit_testing/test_ut_dependency.py index 4ff22a1f119..32e5a0111b7 100644 --- a/tests/functional/unit_testing/test_ut_dependency.py +++ 
b/tests/functional/unit_testing/test_ut_dependency.py @@ -1,6 +1,7 @@ import pytest -from dbt.tests.util import run_dbt, get_unique_ids_in_results + from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import get_unique_ids_in_results, run_dbt local_dependency__dbt_project_yml = """ diff --git a/tests/functional/unit_testing/test_ut_diffing.py b/tests/functional/unit_testing/test_ut_diffing.py new file mode 100644 index 00000000000..dd23ea25fdb --- /dev/null +++ b/tests/functional/unit_testing/test_ut_diffing.py @@ -0,0 +1,113 @@ +import pytest + +from dbt.tests.util import run_dbt + +my_input_model = """ +SELECT 1 as id, 'some string' as status +""" + +my_model = """ +SELECT * FROM {{ ref("my_input_model") }} +""" + +test_my_model_order_insensitive = """ +unit_tests: + - name: unordered_no_nulls + model: my_model + given: + - input: ref("my_input_model") + rows: + - {"id": 1, "status": 'B'} + - {"id": 2, "status": 'B'} + - {"id": 3, "status": 'A'} + expect: + rows: + - {"id": 3, "status": 'A'} + - {"id": 2, "status": 'B'} + - {"id": 1, "status": 'B'} + + - name: unordered_with_nulls + model: my_model + given: + - input: ref("my_input_model") + rows: + - {"id": , "status": 'B'} + - {"id": , "status": 'B'} + - {"id": 3, "status": 'A'} + expect: + rows: + - {"id": 3, "status": 'A'} + - {"id": , "status": 'B'} + - {"id": , "status": 'B'} + + - name: unordered_with_nulls_2 + model: my_model + given: + - input: ref("my_input_model") + rows: + - {"id": 3, "status": 'A'} + - {"id": , "status": 'B'} + - {"id": , "status": 'B'} + expect: + rows: + - {"id": , "status": 'B'} + - {"id": , "status": 'B'} + - {"id": 3, "status": 'A'} + + - name: unordered_with_nulls_mixed_columns + model: my_model + given: + - input: ref("my_input_model") + rows: + - {"id": 3, "status": 'A'} + - {"id": , "status": 'B'} + - {"id": 1, "status": } + expect: + rows: + - {"id": 1, "status": } + - {"id": , "status": 'B'} + - {"id": 3, "status": 'A'} + + - name: 
unordered_with_null + model: my_model + given: + - input: ref("my_input_model") + rows: + - {"id": 3, "status": 'A'} + - {"id": , "status": 'B'} + expect: + rows: + - {"id": , "status": 'B'} + - {"id": 3, "status": 'A'} + + - name: ordered_with_nulls + model: my_model + given: + - input: ref("my_input_model") + rows: + - {"id": 3, "status": 'A'} + - {"id": , "status": 'B'} + - {"id": , "status": 'B'} + expect: + rows: + - {"id": 3, "status": 'A'} + - {"id": , "status": 'B'} + - {"id": , "status": 'B'} +""" + + +class TestUnitTestingDiffIsOrderAgnostic: + @pytest.fixture(scope="class") + def models(self): + return { + "my_input_model.sql": my_input_model, + "my_model.sql": my_model, + "test_my_model.yml": test_my_model_order_insensitive, + } + + def test_unit_testing_diff_is_order_insensitive(self, project): + run_dbt(["run"]) + + # Select by model name + results = run_dbt(["test", "--select", "my_model"], expect_pass=True) + assert len(results) == 6 diff --git a/tests/functional/unit_testing/test_ut_ephemeral.py b/tests/functional/unit_testing/test_ut_ephemeral.py index 2898633ec40..cb2de2d2596 100644 --- a/tests/functional/unit_testing/test_ut_ephemeral.py +++ b/tests/functional/unit_testing/test_ut_ephemeral.py @@ -1,7 +1,7 @@ import pytest -from dbt.tests.util import run_dbt, write_file -from dbt.contracts.results import RunStatus, TestStatus +from dbt.contracts.results import RunStatus, TestStatus +from dbt.tests.util import run_dbt, write_file ephemeral_model_sql = """ {{ config(materialized="ephemeral") }} diff --git a/tests/functional/unit_testing/test_ut_list.py b/tests/functional/unit_testing/test_ut_list.py index f952d2fe484..0b4f263909b 100644 --- a/tests/functional/unit_testing/test_ut_list.py +++ b/tests/functional/unit_testing/test_ut_list.py @@ -1,16 +1,17 @@ -import pytest -from dbt.tests.util import run_dbt import json import os +import pytest from fixtures import ( # noqa: F401 - my_model_vars_sql, + datetime_test, my_model_a_sql, my_model_b_sql, 
+ my_model_vars_sql, test_my_model_yml, - datetime_test, ) +from dbt.tests.util import run_dbt + class TestUnitTestList: @pytest.fixture(scope="class") diff --git a/tests/functional/unit_testing/test_ut_names.py b/tests/functional/unit_testing/test_ut_names.py index d1721438576..a2de3764da4 100644 --- a/tests/functional/unit_testing/test_ut_names.py +++ b/tests/functional/unit_testing/test_ut_names.py @@ -1,8 +1,4 @@ import pytest - -from dbt.tests.util import run_dbt, run_dbt_and_capture -from dbt.exceptions import DuplicateResourceNameError - from fixtures import ( my_model_a_sql, my_model_b_sql, @@ -10,6 +6,9 @@ test_model_a_with_duplicate_test_name_yml, ) +from dbt.exceptions import DuplicateResourceNameError +from dbt.tests.util import run_dbt, run_dbt_and_capture + class TestUnitTestDuplicateTestNamesAcrossModels: @pytest.fixture(scope="class") diff --git a/tests/functional/unit_testing/test_ut_overrides.py b/tests/functional/unit_testing/test_ut_overrides.py index c8102e47ce2..466ceadc437 100644 --- a/tests/functional/unit_testing/test_ut_overrides.py +++ b/tests/functional/unit_testing/test_ut_overrides.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt +from dbt.tests.util import run_dbt my_model_with_macros = """ SELECT diff --git a/tests/functional/unit_testing/test_ut_snapshot_dependency.py b/tests/functional/unit_testing/test_ut_snapshot_dependency.py index f1b20d3e554..a76fe1a39f4 100644 --- a/tests/functional/unit_testing/test_ut_snapshot_dependency.py +++ b/tests/functional/unit_testing/test_ut_snapshot_dependency.py @@ -1,6 +1,7 @@ import pytest -from dbt.tests.util import run_dbt + from dbt.contracts.results import RunStatus, TestStatus +from dbt.tests.util import run_dbt raw_customers_csv = """id,first_name,last_name,email,gender,ip_address,updated_at 1,'Judith','Kennedy','(not provided)','Female','54.60.24.128','2015-12-24 12:19:28' diff --git a/tests/functional/unit_testing/test_ut_sources.py 
b/tests/functional/unit_testing/test_ut_sources.py index 2779d698790..61a45f70f76 100644 --- a/tests/functional/unit_testing/test_ut_sources.py +++ b/tests/functional/unit_testing/test_ut_sources.py @@ -1,6 +1,7 @@ import pytest -from dbt.tests.util import run_dbt, write_file + from dbt.contracts.results import RunStatus, TestStatus +from dbt.tests.util import run_dbt, write_file raw_customers_csv = """id,first_name,last_name,email 1,Michael,Perez,mperez0@chronoengine.com diff --git a/tests/functional/unit_testing/test_ut_versions.py b/tests/functional/unit_testing/test_ut_versions.py index 98afbcf28ad..059688ce473 100644 --- a/tests/functional/unit_testing/test_ut_versions.py +++ b/tests/functional/unit_testing/test_ut_versions.py @@ -1,23 +1,23 @@ import pytest -from dbt.tests.util import run_dbt, get_unique_ids_in_results, write_file -from dbt.exceptions import YamlParseDictError, ParsingError +from dbt.exceptions import ParsingError, YamlParseDictError +from dbt.tests.util import get_unique_ids_in_results, run_dbt, write_file from tests.functional.unit_testing.fixtures import ( - my_model_versioned_yml, + my_model_a_sql, + my_model_b_sql, + my_model_sql, + my_model_v1_sql, + my_model_v2_sql, + my_model_v3_sql, + my_model_version_ref_sql, my_model_versioned_no_2_yml, + my_model_versioned_yml, test_my_model_all_versions_yml, test_my_model_exclude_versions_yml, - test_my_model_include_versions_yml, test_my_model_include_exclude_versions_yml, test_my_model_include_unversioned_yml, + test_my_model_include_versions_yml, test_my_model_version_ref_yml, - my_model_v1_sql, - my_model_v2_sql, - my_model_v3_sql, - my_model_a_sql, - my_model_b_sql, - my_model_sql, - my_model_version_ref_sql, ) diff --git a/tests/functional/utils.py b/tests/functional/utils.py index a82aa378e43..8af471e7c04 100644 --- a/tests/functional/utils.py +++ b/tests/functional/utils.py @@ -1,8 +1,11 @@ import os from contextlib import contextmanager +from dataclasses import dataclass, field from 
datetime import datetime -from typing import Optional from pathlib import Path +from typing import List, Optional + +from dbt_common.events.base_types import BaseEvent, EventMsg @contextmanager @@ -17,3 +20,16 @@ def up_one(return_path: Optional[Path] = None): def is_aware(dt: datetime) -> bool: return dt.tzinfo is not None and dt.tzinfo.utcoffset(dt) is not None + + +@dataclass +class EventCatcher: + event_to_catch: BaseEvent + caught_events: List[EventMsg] = field(default_factory=list) + + def catch(self, event: EventMsg): + if event.info.name == self.event_to_catch.__name__: + self.caught_events.append(event) + + def flush(self) -> None: + self.caught_events = [] diff --git a/tests/unit/artifacts/test_base_resource.py b/tests/unit/artifacts/test_base_resource.py new file mode 100644 index 00000000000..6809d524cd1 --- /dev/null +++ b/tests/unit/artifacts/test_base_resource.py @@ -0,0 +1,58 @@ +from dataclasses import dataclass + +import pytest + +from dbt.artifacts.resources.base import BaseResource +from dbt.artifacts.resources.types import NodeType + + +@dataclass +class BaseResourceWithDefaultField(BaseResource): + field_with_default: bool = True + + +class TestMinorSchemaChange: + @pytest.fixture + def base_resource(self): + return BaseResource( + name="test", + resource_type=NodeType.Model, + package_name="test_package", + path="test_path", + original_file_path="test_original_file_path", + unique_id="test_unique_id", + ) + + @pytest.fixture + def base_resource_new_default_field(self): + return BaseResourceWithDefaultField( + name="test", + resource_type=NodeType.Model, + package_name="test_package", + path="test_path", + original_file_path="test_original_file_path", + unique_id="test_unique_id", + field_with_default=False, + ) + + def test_serializing_new_default_field_is_backward_compatabile( + self, base_resource_new_default_field + ): + # old code (using old class) can create an instance of itself given new data (new class) + 
BaseResource.from_dict(base_resource_new_default_field.to_dict()) + + def test_serializing_new_default_field_is_forward_compatible(self, base_resource): + # new code (using new class) can create an instance of itself given old data (old class) + BaseResourceWithDefaultField.from_dict(base_resource.to_dict()) + + def test_serializing_removed_default_field_is_backward_compatabile(self, base_resource): + # old code (using old class with default field) can create an instance of itself given new data (class w/o default field) + old_resource = BaseResourceWithDefaultField.from_dict(base_resource.to_dict()) + # set to the default value when not provided in data + assert old_resource.field_with_default is True + + def test_serializing_removed_default_field_is_forward_compatible( + self, base_resource_new_default_field + ): + # new code (using class without default field) can create an instance of itself given old data (class with old field) + BaseResource.from_dict(base_resource_new_default_field.to_dict()) diff --git a/tests/unit/cli/test_flags.py b/tests/unit/cli/test_flags.py index 6bf9d692e0e..da5a8490d84 100644 --- a/tests/unit/cli/test_flags.py +++ b/tests/unit/cli/test_flags.py @@ -9,9 +9,9 @@ from dbt.cli.main import cli from dbt.cli.types import Command from dbt.contracts.project import ProjectFlags +from dbt.tests.util import rm_file, write_file from dbt_common.exceptions import DbtInternalError from dbt_common.helper_types import WarnErrorOptions -from dbt.tests.util import rm_file, write_file class TestFlags: diff --git a/tests/unit/test_option_types.py b/tests/unit/cli/test_option_types.py similarity index 95% rename from tests/unit/test_option_types.py rename to tests/unit/cli/test_option_types.py index 67d3c5e941f..1067f64a3c3 100644 --- a/tests/unit/test_option_types.py +++ b/tests/unit/cli/test_option_types.py @@ -1,5 +1,5 @@ -from click import Option, BadParameter import pytest +from click import BadParameter, Option from dbt.cli.option_types import YAML 
diff --git a/tests/functional/adapter/empty/__init__.py b/tests/unit/clients/__init__.py similarity index 100% rename from tests/functional/adapter/empty/__init__.py rename to tests/unit/clients/__init__.py diff --git a/tests/unit/test_jinja.py b/tests/unit/clients/test_jinja.py similarity index 84% rename from tests/unit/test_jinja.py rename to tests/unit/clients/test_jinja.py index e4b0596fb02..5f15d9e3f44 100644 --- a/tests/unit/test_jinja.py +++ b/tests/unit/clients/test_jinja.py @@ -1,10 +1,9 @@ from contextlib import contextmanager + import pytest -import unittest import yaml -from dbt.clients.jinja import get_rendered -from dbt.clients.jinja import get_template +from dbt.clients.jinja import get_rendered, get_template from dbt_common.exceptions import JinjaRenderingError @@ -367,7 +366,7 @@ def expected_id(arg): @pytest.mark.parametrize("value,text_expectation,native_expectation", jinja_tests, ids=expected_id) -def test_jinja_rendering(value, text_expectation, native_expectation): +def test_jinja_rendering_string(value, text_expectation, native_expectation): foo_value = yaml.safe_load(value)["foo"] ctx = {"a_str": "100", "a_int": 100, "b_str": "hello"} with text_expectation as text_result: @@ -377,40 +376,41 @@ def test_jinja_rendering(value, text_expectation, native_expectation): assert native_result == get_rendered(foo_value, ctx, native=True) -class TestJinja(unittest.TestCase): - def test_do(self): - s = "{% set my_dict = {} %}\n{% do my_dict.update(a=1) %}" +def test_do(): + s = "{% set my_dict = {} %}\n{% do my_dict.update(a=1) %}" + + template = get_template(s, {}) + mod = template.make_module() + assert mod.my_dict == {"a": 1} + - template = get_template(s, {}) - mod = template.make_module() - self.assertEqual(mod.my_dict, {"a": 1}) +def test_regular_render(): + s = '{{ "some_value" | as_native }}' + value = get_rendered(s, {}, native=False) + assert value == "some_value" + s = "{{ 1991 | as_native }}" + value = get_rendered(s, {}, native=False) + 
assert value == "1991" - def test_regular_render(self): - s = '{{ "some_value" | as_native }}' - value = get_rendered(s, {}, native=False) - assert value == "some_value" - s = "{{ 1991 | as_native }}" - value = get_rendered(s, {}, native=False) - assert value == "1991" + s = '{{ "some_value" | as_text }}' + value = get_rendered(s, {}, native=False) + assert value == "some_value" + s = "{{ 1991 | as_text }}" + value = get_rendered(s, {}, native=False) + assert value == "1991" - s = '{{ "some_value" | as_text }}' - value = get_rendered(s, {}, native=False) - assert value == "some_value" - s = "{{ 1991 | as_text }}" - value = get_rendered(s, {}, native=False) - assert value == "1991" - def test_native_render(self): - s = '{{ "some_value" | as_native }}' - value = get_rendered(s, {}, native=True) - assert value == "some_value" - s = "{{ 1991 | as_native }}" - value = get_rendered(s, {}, native=True) - assert value == 1991 +def test_native_render(): + s = '{{ "some_value" | as_native }}' + value = get_rendered(s, {}, native=True) + assert value == "some_value" + s = "{{ 1991 | as_native }}" + value = get_rendered(s, {}, native=True) + assert value == 1991 - s = '{{ "some_value" | as_text }}' - value = get_rendered(s, {}, native=True) - assert value == "some_value" - s = "{{ 1991 | as_text }}" - value = get_rendered(s, {}, native=True) - assert value == "1991" + s = '{{ "some_value" | as_text }}' + value = get_rendered(s, {}, native=True) + assert value == "some_value" + s = "{{ 1991 | as_text }}" + value = get_rendered(s, {}, native=True) + assert value == "1991" diff --git a/tests/unit/test_macro_calls.py b/tests/unit/clients/test_jinja_static.py similarity index 100% rename from tests/unit/test_macro_calls.py rename to tests/unit/clients/test_jinja_static.py diff --git a/tests/unit/test_registry_get_request_exception.py b/tests/unit/clients/test_registry.py similarity index 100% rename from tests/unit/test_registry_get_request_exception.py rename to 
tests/unit/clients/test_registry.py index 615f9024462..d6afd9c8f66 100644 --- a/tests/unit/test_registry_get_request_exception.py +++ b/tests/unit/clients/test_registry.py @@ -1,7 +1,7 @@ import unittest -from dbt_common.exceptions import ConnectionError from dbt.clients.registry import _get_with_retries +from dbt_common.exceptions import ConnectionError class testRegistryGetRequestException(unittest.TestCase): diff --git a/tests/unit/config/__init__.py b/tests/unit/config/__init__.py index 073cf3d6499..b9f943c78e2 100644 --- a/tests/unit/config/__init__.py +++ b/tests/unit/config/__init__.py @@ -1,10 +1,9 @@ -from contextlib import contextmanager import os import shutil import tempfile import unittest from argparse import Namespace - +from contextlib import contextmanager import yaml @@ -12,10 +11,7 @@ import dbt.exceptions from dbt import flags from dbt.constants import PACKAGES_FILE_NAME - - from dbt.flags import set_from_args - from tests.unit.utils import normalize INITIAL_ROOT = os.getcwd() diff --git a/tests/unit/config/test_profile.py b/tests/unit/config/test_profile.py index 7c53b715ab9..e79d0935582 100644 --- a/tests/unit/config/test_profile.py +++ b/tests/unit/config/test_profile.py @@ -1,18 +1,17 @@ -from copy import deepcopy - import os +from copy import deepcopy from unittest import mock + import dbt.config import dbt.exceptions - from dbt.adapters.postgres import PostgresCredentials - - from dbt.flags import set_from_args from dbt.tests.util import safe_set_invocation_context - - -from tests.unit.config import BaseConfigTest, empty_profile_renderer, project_from_config_norender +from tests.unit.config import ( + BaseConfigTest, + empty_profile_renderer, + project_from_config_norender, +) class TestProfile(BaseConfigTest): diff --git a/tests/unit/config/test_project.py b/tests/unit/config/test_project.py index 46dbda6b909..7d0006570af 100644 --- a/tests/unit/config/test_project.py +++ b/tests/unit/config/test_project.py @@ -1,33 +1,89 @@ -from copy 
import deepcopy import json import os import unittest -import pytest - +from copy import deepcopy from unittest import mock +import pytest + import dbt.config -from dbt.constants import DEPENDENCIES_FILE_NAME import dbt.exceptions +from dbt.adapters.contracts.connection import DEFAULT_QUERY_COMMENT, QueryComment from dbt.adapters.factory import load_plugin -from dbt.adapters.contracts.connection import QueryComment, DEFAULT_QUERY_COMMENT -from dbt.contracts.project import PackageConfig, LocalPackage, GitPackage -from dbt.node_types import NodeType -from dbt_common.semver import VersionSpecifier - +from dbt.config.project import Project +from dbt.constants import DEPENDENCIES_FILE_NAME +from dbt.contracts.project import GitPackage, LocalPackage, PackageConfig from dbt.flags import set_from_args +from dbt.node_types import NodeType from dbt.tests.util import safe_set_invocation_context - - +from dbt_common.exceptions import DbtRuntimeError +from dbt_common.semver import VersionSpecifier from tests.unit.config import ( BaseConfigTest, - project_from_config_norender, empty_project_renderer, + project_from_config_norender, project_from_config_rendered, ) -class TestProject(BaseConfigTest): +class TestProjectMethods: + def test_all_source_paths(self, project: Project): + assert ( + project.all_source_paths.sort() + == ["models", "seeds", "snapshots", "analyses", "macros"].sort() + ) + + def test_generic_test_paths(self, project: Project): + assert project.generic_test_paths == ["tests/generic"] + + def test_fixture_paths(self, project: Project): + assert project.fixture_paths == ["tests/fixtures"] + + def test__str__(self, project: Project): + assert ( + str(project) + == "{'name': 'test_project', 'version': 1.0, 'project-root': 'doesnt/actually/exist', 'profile': 'test_profile', 'model-paths': ['models'], 'macro-paths': ['macros'], 'seed-paths': ['seeds'], 'test-paths': ['tests'], 'analysis-paths': ['analyses'], 'docs-paths': ['docs'], 'asset-paths': ['assets'], 
'target-path': 'target', 'snapshot-paths': ['snapshots'], 'clean-targets': ['target'], 'log-path': 'path/to/project/logs', 'quoting': {'database': True, 'schema': True, 'identifier': True}, 'models': {}, 'on-run-start': [], 'on-run-end': [], 'dispatch': [{'macro_namespace': 'dbt_utils', 'search_order': ['test_project', 'dbt_utils']}], 'seeds': {}, 'snapshots': {}, 'sources': {}, 'data_tests': {}, 'unit_tests': {}, 'metrics': {}, 'semantic-models': {}, 'saved-queries': {}, 'exposures': {}, 'vars': {}, 'require-dbt-version': ['=0.0.0'], 'restrict-access': False, 'dbt-cloud': {}, 'query-comment': {'comment': \"\\n{%- set comment_dict = {} -%}\\n{%- do comment_dict.update(\\n app='dbt',\\n dbt_version=dbt_version,\\n profile_name=target.get('profile_name'),\\n target_name=target.get('target_name'),\\n) -%}\\n{%- if node is not none -%}\\n {%- do comment_dict.update(\\n node_id=node.unique_id,\\n ) -%}\\n{% else %}\\n {# in the node context, the connection name is the node_id #}\\n {%- do comment_dict.update(connection_name=connection_name) -%}\\n{%- endif -%}\\n{{ return(tojson(comment_dict)) }}\\n\", 'append': False, 'job-label': False}, 'packages': []}" + ) + + def test_get_selector(self, project: Project): + selector = project.get_selector("my_selector") + assert selector.raw == "give me cats" + + with pytest.raises(DbtRuntimeError): + project.get_selector("doesnt_exist") + + def test_get_default_selector_name(self, project: Project): + default_selector_name = project.get_default_selector_name() + assert default_selector_name == "my_selector" + + project.selectors["my_selector"]["default"] = False + default_selector_name = project.get_default_selector_name() + assert default_selector_name is None + + def test_get_macro_search_order(self, project: Project): + search_order = project.get_macro_search_order("dbt_utils") + assert search_order == ["test_project", "dbt_utils"] + + search_order = project.get_macro_search_order("doesnt_exist") + assert search_order is None + 
+ def test_project_target_path(self, project: Project): + assert project.project_target_path == "doesnt/actually/exist/target" + + def test_eq(self, project: Project): + other = deepcopy(project) + assert project == other + + def test_neq(self, project: Project): + other = deepcopy(project) + other.project_name = "other project" + assert project != other + + def test_hashed_name(self, project: Project): + assert project.hashed_name() == "6e72a69d5c5cca8f0400338441c022e4" + + +class TestProjectInitialization(BaseConfigTest): def test_defaults(self): project = project_from_config_norender( self.default_project_data, project_root=self.project_dir @@ -60,21 +116,6 @@ def test_defaults(self): # embarrassing str(project) - def test_eq(self): - project = project_from_config_norender( - self.default_project_data, project_root=self.project_dir - ) - other = project_from_config_norender( - self.default_project_data, project_root=self.project_dir - ) - self.assertEqual(project, other) - - def test_neq(self): - project = project_from_config_norender( - self.default_project_data, project_root=self.project_dir - ) - self.assertNotEqual(project, object()) - def test_implicit_overrides(self): self.default_project_data.update( { @@ -89,12 +130,6 @@ def test_implicit_overrides(self): set(["other-models", "seeds", "snapshots", "analyses", "macros"]), ) - def test_hashed_name(self): - project = project_from_config_norender( - self.default_project_data, project_root=self.project_dir - ) - self.assertEqual(project.hashed_name(), "754cd47eac1d6f50a5f7cd399ec43da4") - def test_all_overrides(self): # log-path is not tested because it is set exclusively from flags, not cfg self.default_project_data.update( diff --git a/tests/unit/config/test_runtime.py b/tests/unit/config/test_runtime.py index 84220d53bbf..6d2b18fd896 100644 --- a/tests/unit/config/test_runtime.py +++ b/tests/unit/config/test_runtime.py @@ -1,16 +1,13 @@ import os from argparse import Namespace - from unittest import mock 
import dbt.config import dbt.exceptions from dbt import tracking from dbt.contracts.project import PackageConfig - from dbt.flags import set_from_args from dbt.tests.util import safe_set_invocation_context - from tests.unit.config import ( BaseConfigTest, empty_profile_renderer, diff --git a/tests/unit/test_selector_errors.py b/tests/unit/config/test_selectors.py similarity index 50% rename from tests/unit/test_selector_errors.py rename to tests/unit/config/test_selectors.py index 017c7dee22b..d306fb55282 100644 --- a/tests/unit/test_selector_errors.py +++ b/tests/unit/config/test_selectors.py @@ -1,10 +1,11 @@ -import dbt.exceptions import textwrap -import yaml import unittest -from dbt.config.selectors import selector_config_from_data -from dbt.config.selectors import SelectorConfig +import yaml + +import dbt.exceptions +from dbt.config.selectors import SelectorConfig, SelectorDict, selector_config_from_data +from dbt.exceptions import DbtSelectorsError def get_selector_dict(txt: str) -> dict: @@ -201,3 +202,188 @@ def test_multiple_default_true(self): dbt.exceptions.DbtSelectorsError, "Found multiple selectors with `default: true`:" ): selector_config_from_data(dct) + + def test_compare_cli_non_cli(self): + dct = get_selector_dict( + """\ + selectors: + - name: nightly_diet_snowplow + description: "This uses more CLI-style syntax" + definition: + union: + - intersection: + - '@source:snowplow' + - 'tag:nightly' + - 'models/export' + - exclude: + - intersection: + - 'package:snowplow' + - 'config.materialized:incremental' + - export_performance_timing + - name: nightly_diet_snowplow_full + description: "This is a fuller YAML specification" + definition: + union: + - intersection: + - method: source + value: snowplow + childrens_parents: true + - method: tag + value: nightly + - method: path + value: models/export + - exclude: + - intersection: + - method: package + value: snowplow + - method: config.materialized + value: incremental + - method: fqn + value: 
export_performance_timing + """ + ) + + sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) + assert sel_dict + with_strings = sel_dict["nightly_diet_snowplow"]["definition"] + no_strings = sel_dict["nightly_diet_snowplow_full"]["definition"] + self.assertEqual(with_strings, no_strings) + + def test_single_string_definition(self): + dct = get_selector_dict( + """\ + selectors: + - name: nightly_selector + definition: + 'tag:nightly' + """ + ) + + sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) + assert sel_dict + expected = {"method": "tag", "value": "nightly"} + definition = sel_dict["nightly_selector"]["definition"] + self.assertEqual(expected, definition) + + def test_single_key_value_definition(self): + dct = get_selector_dict( + """\ + selectors: + - name: nightly_selector + definition: + tag: nightly + """ + ) + + sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) + assert sel_dict + expected = {"method": "tag", "value": "nightly"} + definition = sel_dict["nightly_selector"]["definition"] + self.assertEqual(expected, definition) + + def test_parent_definition(self): + dct = get_selector_dict( + """\ + selectors: + - name: kpi_nightly_selector + definition: + '+exposure:kpi_nightly' + """ + ) + + sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) + assert sel_dict + expected = {"method": "exposure", "value": "kpi_nightly", "parents": True} + definition = sel_dict["kpi_nightly_selector"]["definition"] + self.assertEqual(expected, definition) + + def test_plus_definition(self): + dct = get_selector_dict( + """\ + selectors: + - name: my_model_children_selector + definition: + 'my_model+2' + """ + ) + + sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) + assert sel_dict + expected = {"method": "fqn", "value": "my_model", "children": True, "children_depth": "2"} + definition = sel_dict["my_model_children_selector"]["definition"] + self.assertEqual(expected, definition) + + def 
test_selector_definition(self): + dct = get_selector_dict( + """\ + selectors: + - name: default + definition: + union: + - intersection: + - tag: foo + - tag: bar + - name: inherited + definition: + method: selector + value: default + """ + ) + + sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) + assert sel_dict + definition = sel_dict["default"]["definition"] + expected = sel_dict["inherited"]["definition"] + self.assertEqual(expected, definition) + + def test_selector_definition_with_exclusion(self): + dct = get_selector_dict( + """\ + selectors: + - name: default + definition: + union: + - intersection: + - tag: foo + - tag: bar + - name: inherited + definition: + union: + - method: selector + value: default + - exclude: + - tag: bar + - name: comparison + definition: + union: + - union: + - intersection: + - tag: foo + - tag: bar + - exclude: + - tag: bar + """ + ) + + sel_dict = SelectorDict.parse_from_selectors_list((dct["selectors"])) + assert sel_dict + definition = sel_dict["inherited"]["definition"] + expected = sel_dict["comparison"]["definition"] + self.assertEqual(expected, definition) + + def test_missing_selector(self): + dct = get_selector_dict( + """\ + selectors: + - name: inherited + definition: + method: selector + value: default + """ + ) + with self.assertRaises(DbtSelectorsError) as err: + SelectorDict.parse_from_selectors_list((dct["selectors"])) + + self.assertEqual( + "Existing selector definition for default not found.", str(err.exception.msg) + ) diff --git a/tests/unit/config/test_utils.py b/tests/unit/config/test_utils.py new file mode 100644 index 00000000000..d2d7f99499d --- /dev/null +++ b/tests/unit/config/test_utils.py @@ -0,0 +1,41 @@ +import pytest + +from dbt.config.utils import exclusive_primary_alt_value_setting +from dbt.exceptions import DbtExclusivePropertyUseError + + +class TestExclusivePrimaryAltValueSetting: + @pytest.fixture(scope="class") + def primary_key(self) -> str: + return "key_a" + + 
@pytest.fixture(scope="class") + def alt_key(self) -> str: + return "key_b" + + @pytest.fixture(scope="class") + def value(self) -> str: + return "I LIKE CATS" + + def test_primary_set(self, primary_key: str, alt_key: str, value: str): + test_dict = {primary_key: value} + exclusive_primary_alt_value_setting(test_dict, primary_key, alt_key) + assert test_dict.get(primary_key) == value + assert test_dict.get(alt_key) is None + + def test_alt_set(self, primary_key: str, alt_key: str, value: str): + test_dict = {alt_key: value} + exclusive_primary_alt_value_setting(test_dict, primary_key, alt_key) + assert test_dict.get(primary_key) == value + assert test_dict.get(alt_key) == value + + def test_primary_and_alt_set(self, primary_key: str, alt_key: str, value: str): + test_dict = {primary_key: value, alt_key: value} + with pytest.raises(DbtExclusivePropertyUseError): + exclusive_primary_alt_value_setting(test_dict, primary_key, alt_key) + + def test_neither_primary_nor_alt_set(self, primary_key: str, alt_key: str): + test_dict = {} + exclusive_primary_alt_value_setting(test_dict, primary_key, alt_key) + assert test_dict.get(primary_key) is None + assert test_dict.get(alt_key) is None diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 5e9acb84907..f1823fb858f 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -5,7 +5,11 @@ from dbt.contracts.graph.nodes import SourceDefinition # All manifest related fixtures. 
+from tests.unit.utils.adapter import * # noqa +from tests.unit.utils.event_manager import * # noqa +from tests.unit.utils.flags import * # noqa from tests.unit.utils.manifest import * # noqa +from tests.unit.utils.project import * # noqa @pytest.fixture diff --git a/tests/unit/context/__init__.py b/tests/unit/context/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/unit/test_base_context.py b/tests/unit/context/test_base.py similarity index 99% rename from tests/unit/test_base_context.py rename to tests/unit/context/test_base.py index 0dc2d93ddca..2246c1f77ff 100644 --- a/tests/unit/test_base_context.py +++ b/tests/unit/context/test_base.py @@ -1,8 +1,9 @@ import os -from dbt.context.base import BaseContext from jinja2.runtime import Undefined +from dbt.context.base import BaseContext + class TestBaseContext: def test_log_jinja_undefined(self): diff --git a/tests/unit/context/test_context.py b/tests/unit/context/test_context.py index 6070c24a1b7..10e591093ee 100644 --- a/tests/unit/context/test_context.py +++ b/tests/unit/context/test_context.py @@ -1,37 +1,27 @@ import os -from typing import Set, Dict, Any +from typing import Any, Dict, Set from unittest import mock import pytest -from dbt.adapters import postgres -from dbt.adapters import factory +import dbt_common.exceptions +from dbt.adapters import factory, postgres from dbt.clients.jinja import MacroStack +from dbt.config.project import VarProvider +from dbt.context import base, docs, macros, providers, query_header +from dbt.contracts.files import FileHash from dbt.contracts.graph.nodes import ( - ModelNode, - NodeConfig, DependsOn, Macro, + ModelNode, + NodeConfig, UnitTestNode, UnitTestOverrides, ) -from dbt.config.project import VarProvider -from dbt.context import base, providers, docs, macros, query_header -from dbt.contracts.files import FileHash -from dbt_common.events.functions import reset_metadata_vars -from dbt.flags import set_from_args from dbt.node_types 
import NodeType -import dbt_common.exceptions - -from tests.unit.utils import ( - config_from_parts_or_dicts, - inject_adapter, - clear_plugin, -) +from dbt_common.events.functions import reset_metadata_vars from tests.unit.mock_adapter import adapter_factory -from argparse import Namespace - -set_from_args(Namespace(WARN_ERROR=False), None) +from tests.unit.utils import clear_plugin, config_from_parts_or_dicts, inject_adapter class TestVar: diff --git a/tests/unit/test_macro_resolver.py b/tests/unit/context/test_macro_resolver.py similarity index 100% rename from tests/unit/test_macro_resolver.py rename to tests/unit/context/test_macro_resolver.py index 57e8e9e47db..4611d4dc949 100644 --- a/tests/unit/test_macro_resolver.py +++ b/tests/unit/context/test_macro_resolver.py @@ -1,8 +1,8 @@ import unittest from unittest import mock -from dbt.contracts.graph.nodes import Macro from dbt.context.macro_resolver import MacroResolver +from dbt.contracts.graph.nodes import Macro def mock_macro(name, package_name): diff --git a/tests/unit/context/test_providers.py b/tests/unit/context/test_providers.py index 3deb226ccd0..224675143e4 100644 --- a/tests/unit/context/test_providers.py +++ b/tests/unit/context/test_providers.py @@ -1,9 +1,14 @@ -import pytest from unittest import mock +import pytest + from dbt.adapters.base import BaseRelation from dbt.artifacts.resources import Quoting -from dbt.context.providers import BaseResolver, RuntimeRefResolver, RuntimeSourceResolver +from dbt.context.providers import ( + BaseResolver, + RuntimeRefResolver, + RuntimeSourceResolver, +) class TestBaseResolver: @@ -63,6 +68,7 @@ def test_create_relation_with_empty(self, resolver, empty, is_ephemeral_model, e mock_node.quoting_dict = {} mock_node.alias = "test" mock_node.is_ephemeral_model = is_ephemeral_model + mock_node.defer_relation = None # create limited relation with mock.patch("dbt.contracts.graph.nodes.ParsedNode", new=mock.Mock): diff --git a/tests/unit/context/test_query_header.py 
b/tests/unit/context/test_query_header.py index aa9e99821a2..f14d28d40c4 100644 --- a/tests/unit/context/test_query_header.py +++ b/tests/unit/context/test_query_header.py @@ -1,15 +1,11 @@ -import pytest import re from unittest import mock +import pytest + from dbt.adapters.base.query_headers import MacroQueryStringSetter from dbt.context.query_header import generate_query_header_context - from tests.unit.utils import config_from_parts_or_dicts -from dbt.flags import set_from_args -from argparse import Namespace - -set_from_args(Namespace(WARN_ERROR=False), None) class TestQueryHeaderContext: diff --git a/tests/unit/test_manifest.py b/tests/unit/contracts/graph/test_manifest.py similarity index 90% rename from tests/unit/test_manifest.py rename to tests/unit/contracts/graph/test_manifest.py index ea443d1147f..a945e7b5d3e 100644 --- a/tests/unit/test_manifest.py +++ b/tests/unit/contracts/graph/test_manifest.py @@ -9,23 +9,13 @@ import freezegun import pytest +from dbt_semantic_interfaces.type_enums import MetricType import dbt.flags import dbt.version +import dbt_common.invocation from dbt import tracking from dbt.adapters.base.plugin import AdapterPlugin -from dbt.contracts.files import FileHash -from dbt.contracts.graph.manifest import Manifest, ManifestMetadata -from dbt.contracts.graph.nodes import ( - ModelNode, - DependsOn, - ModelConfig, - SeedNode, - SourceDefinition, - Exposure, - Metric, - Group, -) from dbt.artifacts.resources import ( ExposureType, MaturityType, @@ -36,21 +26,31 @@ WhereFilter, WhereFilterIntersection, ) -import dbt_common.invocation -from dbt_common.events.functions import reset_metadata_vars +from dbt.contracts.files import FileHash +from dbt.contracts.graph.manifest import Manifest, ManifestMetadata +from dbt.contracts.graph.nodes import ( + DependsOn, + Exposure, + Group, + Metric, + ModelConfig, + ModelNode, + SeedNode, + SourceDefinition, +) from dbt.exceptions import AmbiguousResourceNameRefError from dbt.flags import 
set_from_args from dbt.node_types import NodeType -from dbt_semantic_interfaces.type_enums import MetricType - -from .utils import ( - MockMacro, +from dbt_common.events.functions import reset_metadata_vars +from tests.unit.utils import ( MockDocumentation, - MockSource, - MockNode, - MockMaterialization, MockGenerateMacro, + MockMacro, + MockMaterialization, + MockNode, + MockSource, inject_plugin, + make_manifest, ) REQUIRED_PARSED_NODE_KEYS = frozenset( @@ -75,13 +75,13 @@ "raw_code", "language", "description", + "primary_key", "columns", "fqn", "build_path", "compiled_path", "patch_path", "docs", - "deferred", "checksum", "unrendered_config", "created_at", @@ -1041,16 +1041,15 @@ def test_merge_from_artifact(self): original_manifest = Manifest(nodes=original_nodes) other_manifest = Manifest(nodes=other_nodes) - adapter = mock.MagicMock() - original_manifest.merge_from_artifact(adapter, other_manifest, {}) + original_manifest.merge_from_artifact(other_manifest) # new node added should not be in original manifest assert "model.root.nested2" not in original_manifest.nodes - # old node removed should not have state relation in original manifest + # old node removed should not have defer_relation in original manifest assert original_manifest.nodes["model.root.nested"].defer_relation is None - # for all other nodes, check that state relation is updated + # for all other nodes, check that defer_relation is updated for k, v in original_manifest.nodes.items(): if v.defer_relation: self.assertEqual("other_" + v.database, v.defer_relation.database) @@ -1093,20 +1092,6 @@ def setUp(self): ) -def make_manifest(nodes=[], sources=[], macros=[], docs=[]): - return Manifest( - nodes={n.unique_id: n for n in nodes}, - macros={m.unique_id: m for m in macros}, - sources={s.unique_id: s for s in sources}, - docs={d.unique_id: d for d in docs}, - disabled={}, - files={}, - exposures={}, - metrics={}, - selectors={}, - ) - - FindMacroSpec = namedtuple("FindMacroSpec", 
"macros,expected") macro_parameter_sets = [ @@ -1240,7 +1225,7 @@ def test_find_generate_macros_by_name(macros, expectations): FindMaterializationSpec = namedtuple("FindMaterializationSpec", "macros,adapter_type,expected") -def _materialization_parameter_sets(): +def _materialization_parameter_sets_legacy(): # inject the plugins used for materialization parameter tests FooPlugin = AdapterPlugin( adapter=mock.MagicMock(), @@ -1386,12 +1371,187 @@ def id_mat(arg): return "_".join(arg) +@pytest.mark.parametrize( + "macros,adapter_type,expected", + _materialization_parameter_sets_legacy(), + ids=id_mat, +) +def test_find_materialization_by_name_legacy(macros, adapter_type, expected): + set_from_args( + Namespace( + SEND_ANONYMOUS_USAGE_STATS=False, + REQUIRE_EXPLICIT_PACKAGE_OVERRIDES_FOR_BUILTIN_MATERIALIZATIONS=False, + ), + None, + ) + + manifest = make_manifest(macros=macros) + result = manifest.find_materialization_macro_by_name( + project_name="root", + materialization_name="my_materialization", + adapter_type=adapter_type, + ) + if expected is None: + assert result is expected + else: + expected_package, expected_adapter_type = expected + assert result.adapter_type == expected_adapter_type + assert result.package_name == expected_package + + +def _materialization_parameter_sets(): + # inject the plugins used for materialization parameter tests + FooPlugin = AdapterPlugin( + adapter=mock.MagicMock(), + credentials=mock.MagicMock(), + include_path="/path/to/root/plugin", + project_name="foo", + ) + FooPlugin.adapter.type.return_value = "foo" + inject_plugin(FooPlugin) + + BarPlugin = AdapterPlugin( + adapter=mock.MagicMock(), + credentials=mock.MagicMock(), + include_path="/path/to/root/plugin", + dependencies=["foo"], + project_name="bar", + ) + BarPlugin.adapter.type.return_value = "bar" + inject_plugin(BarPlugin) + + sets = [ + FindMaterializationSpec(macros=[], adapter_type="foo", expected=None), + ] + + # default only, each project + sets.extend( + 
FindMaterializationSpec( + macros=[MockMaterialization(project, adapter_type=None)], + adapter_type="foo", + expected=(project, "default"), + ) + for project in ["root", "dep", "dbt"] + ) + + # other type only, each project + sets.extend( + FindMaterializationSpec( + macros=[MockMaterialization(project, adapter_type="bar")], + adapter_type="foo", + expected=None, + ) + for project in ["root", "dep", "dbt"] + ) + + # matching type only, each project + sets.extend( + FindMaterializationSpec( + macros=[MockMaterialization(project, adapter_type="foo")], + adapter_type="foo", + expected=(project, "foo"), + ) + for project in ["root", "dep", "dbt"] + ) + + sets.extend( + [ + # matching type and default everywhere + FindMaterializationSpec( + macros=[ + MockMaterialization(project, adapter_type=atype) + for (project, atype) in product(["root", "dep", "dbt"], ["foo", None]) + ], + adapter_type="foo", + expected=("root", "foo"), + ), + # default in core, override is in dep, and root has unrelated override + # should find the dbt default because default materializations cannot be overwritten by packages. + FindMaterializationSpec( + macros=[ + MockMaterialization("root", adapter_type="bar"), + MockMaterialization("dep", adapter_type="foo"), + MockMaterialization("dbt", adapter_type=None), + ], + adapter_type="foo", + expected=("dbt", "default"), + ), + # default in core, unrelated override is in dep, and root has an override + # should find the root override. + FindMaterializationSpec( + macros=[ + MockMaterialization("root", adapter_type="foo"), + MockMaterialization("dep", adapter_type="bar"), + MockMaterialization("dbt", adapter_type=None), + ], + adapter_type="foo", + expected=("root", "foo"), + ), + # default in core, override is in dep, and root has an override too. + # should find the root override. 
+ FindMaterializationSpec( + macros=[ + MockMaterialization("root", adapter_type="foo"), + MockMaterialization("dep", adapter_type="foo"), + MockMaterialization("dbt", adapter_type=None), + ], + adapter_type="foo", + expected=("root", "foo"), + ), + # core has default + adapter, dep has adapter, root has default + # should find the default adapter implementation, because it's the most specific + # and default materializations cannot be overwritten by packages + FindMaterializationSpec( + macros=[ + MockMaterialization("root", adapter_type=None), + MockMaterialization("dep", adapter_type="foo"), + MockMaterialization("dbt", adapter_type=None), + MockMaterialization("dbt", adapter_type="foo"), + ], + adapter_type="foo", + expected=("dbt", "foo"), + ), + ] + ) + + # inherit from parent adapter + sets.extend( + FindMaterializationSpec( + macros=[MockMaterialization(project, adapter_type="foo")], + adapter_type="bar", + expected=(project, "foo"), + ) + for project in ["root", "dep", "dbt"] + ) + sets.extend( + FindMaterializationSpec( + macros=[ + MockMaterialization(project, adapter_type="foo"), + MockMaterialization(project, adapter_type="bar"), + ], + adapter_type="bar", + expected=(project, "bar"), + ) + for project in ["root", "dep", "dbt"] + ) + + return sets + + @pytest.mark.parametrize( "macros,adapter_type,expected", _materialization_parameter_sets(), ids=id_mat, ) def test_find_materialization_by_name(macros, adapter_type, expected): + set_from_args( + Namespace( + SEND_ANONYMOUS_USAGE_STATS=False, + REQUIRE_EXPLICIT_PACKAGE_OVERRIDES_FOR_BUILTIN_MATERIALIZATIONS=True, + ), + None, + ) + manifest = make_manifest(macros=macros) result = manifest.find_materialization_macro_by_name( project_name="root", diff --git a/tests/unit/test_contracts_graph_node_args.py b/tests/unit/contracts/graph/test_node_args.py similarity index 100% rename from tests/unit/test_contracts_graph_node_args.py rename to tests/unit/contracts/graph/test_node_args.py diff --git 
a/tests/unit/test_contracts_graph_compiled.py b/tests/unit/contracts/graph/test_nodes.py similarity index 72% rename from tests/unit/test_contracts_graph_compiled.py rename to tests/unit/contracts/graph/test_nodes.py index 96fbeb54090..a498b99dcbc 100644 --- a/tests/unit/test_contracts_graph_compiled.py +++ b/tests/unit/contracts/graph/test_nodes.py @@ -1,28 +1,35 @@ import pickle -import pytest - +import re from dataclasses import replace -from dbt.artifacts.resources import ColumnInfo +import pytest + +from dbt.artifacts.resources import ColumnInfo, TestConfig, TestMetadata +from dbt.compilation import inject_ctes_into_sql from dbt.contracts.files import FileHash from dbt.contracts.graph.nodes import ( DependsOn, GenericTestNode, - ModelNode, + InjectedCTE, ModelConfig, + ModelNode, ) -from dbt.artifacts.resources import TestConfig, TestMetadata -from tests.unit.fixtures import generic_test_node, model_node from dbt.node_types import NodeType - -from .utils import ( - assert_symmetric, - assert_from_dict, +from tests.unit.fixtures import generic_test_node, model_node +from tests.unit.utils import ( assert_fails_validation, + assert_from_dict, + assert_symmetric, replace_config, ) +def norm_whitespace(string): + _RE_COMBINE_WHITESPACE = re.compile(r"\s+") + string = _RE_COMBINE_WHITESPACE.sub(" ", string).strip() + return string + + @pytest.fixture def basic_uncompiled_model(): return ModelNode( @@ -39,7 +46,6 @@ def basic_uncompiled_model(): sources=[], metrics=[], depends_on=DependsOn(), - deferred=False, description="", database="test_db", schema="test_schema", @@ -103,7 +109,6 @@ def basic_uncompiled_dict(): "metrics": [], "depends_on": {"macros": [], "nodes": []}, "database": "test_db", - "deferred": False, "description": "", "schema": "test_schema", "alias": "bar", @@ -156,8 +161,8 @@ def basic_compiled_dict(): "metrics": [], "depends_on": {"macros": [], "nodes": []}, "database": "test_db", - "deferred": True, "description": "", + "primary_key": [], 
"schema": "test_schema", "alias": "bar", "tags": [], @@ -383,7 +388,6 @@ def basic_uncompiled_schema_test_node(): refs=[], sources=[], metrics=[], - deferred=False, depends_on=DependsOn(), description="", database="test_db", @@ -439,7 +443,6 @@ def basic_uncompiled_schema_test_dict(): "fail_calc": "count(*)", "meta": {}, }, - "deferred": False, "docs": {"show": True}, "columns": {}, "meta": {}, @@ -476,7 +479,6 @@ def basic_compiled_schema_test_dict(): "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, - "deferred": False, "database": "test_db", "description": "", "schema": "dbt_test__audit", @@ -614,3 +616,191 @@ def test_compare_to_compiled(basic_uncompiled_schema_test_node, basic_compiled_s compiled, config=fixed_config, unrendered_config=uncompiled.unrendered_config ) assert uncompiled.same_contents(fixed_compiled, "postgres") + + +def test_inject_ctes_simple1(): + starting_sql = "select * from __dbt__cte__base" + ctes = [ + InjectedCTE( + id="model.test.base", + sql=" __dbt__cte__base as (\n\n\nselect * from test16873767336887004702_test_ephemeral.seed\n)", + ) + ] + expected_sql = """with __dbt__cte__base as ( + select * from test16873767336887004702_test_ephemeral.seed + ) select * from __dbt__cte__base""" + + generated_sql = inject_ctes_into_sql(starting_sql, ctes) + assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) + + +def test_inject_ctes_simple2(): + starting_sql = "select * from __dbt__cte__ephemeral_level_two" + ctes = [ + InjectedCTE( + id="model.test.ephemeral_level_two", + sql=' __dbt__cte__ephemeral_level_two as (\n\nselect * from "dbt"."test16873757769710148165_test_ephemeral"."source_table"\n)', + ) + ] + expected_sql = """with __dbt__cte__ephemeral_level_two as ( + select * from "dbt"."test16873757769710148165_test_ephemeral"."source_table" + ) select * from __dbt__cte__ephemeral_level_two""" + + generated_sql = inject_ctes_into_sql(starting_sql, ctes) + assert norm_whitespace(generated_sql) == 
norm_whitespace(expected_sql) + + +def test_inject_ctes_multiple_ctes(): + + starting_sql = "select * from __dbt__cte__ephemeral" + ctes = [ + InjectedCTE( + id="model.test.ephemeral_level_two", + sql=' __dbt__cte__ephemeral_level_two as (\n\nselect * from "dbt"."test16873735573223965828_test_ephemeral"."source_table"\n)', + ), + InjectedCTE( + id="model.test.ephemeral", + sql=" __dbt__cte__ephemeral as (\n\nselect * from __dbt__cte__ephemeral_level_two\n)", + ), + ] + expected_sql = """with __dbt__cte__ephemeral_level_two as ( + select * from "dbt"."test16873735573223965828_test_ephemeral"."source_table" + ), __dbt__cte__ephemeral as ( + select * from __dbt__cte__ephemeral_level_two + ) select * from __dbt__cte__ephemeral""" + + generated_sql = inject_ctes_into_sql(starting_sql, ctes) + assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) + + +def test_inject_ctes_multiple_ctes_more_complex(): + starting_sql = """select * from __dbt__cte__female_only + union all + select * from "dbt"."test16873757723266827902_test_ephemeral"."double_dependent" where gender = 'Male'""" + ctes = [ + InjectedCTE( + id="model.test.base", + sql=" __dbt__cte__base as (\n\n\nselect * from test16873757723266827902_test_ephemeral.seed\n)", + ), + InjectedCTE( + id="model.test.base_copy", + sql=" __dbt__cte__base_copy as (\n\n\nselect * from __dbt__cte__base\n)", + ), + InjectedCTE( + id="model.test.female_only", + sql=" __dbt__cte__female_only as (\n\n\nselect * from __dbt__cte__base_copy where gender = 'Female'\n)", + ), + ] + expected_sql = """with __dbt__cte__base as ( + select * from test16873757723266827902_test_ephemeral.seed + ), __dbt__cte__base_copy as ( + select * from __dbt__cte__base + ), __dbt__cte__female_only as ( + select * from __dbt__cte__base_copy where gender = 'Female' + ) select * from __dbt__cte__female_only + union all + select * from "dbt"."test16873757723266827902_test_ephemeral"."double_dependent" where gender = 'Male'""" + + generated_sql = 
inject_ctes_into_sql(starting_sql, ctes) + assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) + + +def test_inject_ctes_starting_with1(): + starting_sql = """ + with internal_cte as (select * from sessions) + select * from internal_cte + """ + ctes = [ + InjectedCTE( + id="cte_id_1", + sql="__dbt__cte__ephemeral as (select * from table)", + ), + InjectedCTE( + id="cte_id_2", + sql="__dbt__cte__events as (select id, type from events)", + ), + ] + expected_sql = """with __dbt__cte__ephemeral as (select * from table), + __dbt__cte__events as (select id, type from events), + internal_cte as (select * from sessions) + select * from internal_cte""" + + generated_sql = inject_ctes_into_sql(starting_sql, ctes) + assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) + + +def test_inject_ctes_starting_with2(): + starting_sql = """with my_other_cool_cte as ( + select id, name from __dbt__cte__ephemeral + where id > 1000 + ) + select name, id from my_other_cool_cte""" + ctes = [ + InjectedCTE( + id="model.singular_tests_ephemeral.ephemeral", + sql=' __dbt__cte__ephemeral as (\n\n\nwith my_cool_cte as (\n select name, id from "dbt"."test16873917221900185954_test_singular_tests_ephemeral"."base"\n)\nselect id, name from my_cool_cte where id is not null\n)', + ) + ] + expected_sql = """with __dbt__cte__ephemeral as ( + with my_cool_cte as ( + select name, id from "dbt"."test16873917221900185954_test_singular_tests_ephemeral"."base" + ) + select id, name from my_cool_cte where id is not null + ), my_other_cool_cte as ( + select id, name from __dbt__cte__ephemeral + where id > 1000 + ) + select name, id from my_other_cool_cte""" + + generated_sql = inject_ctes_into_sql(starting_sql, ctes) + assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) + + +def test_inject_ctes_comment_with(): + # Test injection with a comment containing "with" + starting_sql = """ + --- This is sql with a comment + select * from __dbt__cte__base + """ + 
ctes = [ + InjectedCTE( + id="model.test.base", + sql=" __dbt__cte__base as (\n\n\nselect * from test16873767336887004702_test_ephemeral.seed\n)", + ) + ] + expected_sql = """with __dbt__cte__base as ( + select * from test16873767336887004702_test_ephemeral.seed + ) --- This is sql with a comment + select * from __dbt__cte__base""" + + generated_sql = inject_ctes_into_sql(starting_sql, ctes) + assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) + + +def test_inject_ctes_with_recursive(): + # Test injection with "recursive" keyword + starting_sql = """ + with recursive t(n) as ( + select * from __dbt__cte__first_ephemeral_model + union all + select n+1 from t where n < 100 + ) + select sum(n) from t + """ + ctes = [ + InjectedCTE( + id="model.test.first_ephemeral_model", + sql=" __dbt__cte__first_ephemeral_model as (\n\nselect 1 as fun\n)", + ) + ] + expected_sql = """with recursive __dbt__cte__first_ephemeral_model as ( + select 1 as fun + ), t(n) as ( + select * from __dbt__cte__first_ephemeral_model + union all + select n+1 from t where n < 100 + ) + select sum(n) from t + """ + generated_sql = inject_ctes_into_sql(starting_sql, ctes) + assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) diff --git a/tests/unit/test_contracts_graph_parsed.py b/tests/unit/contracts/graph/test_nodes_parsed.py similarity index 99% rename from tests/unit/test_contracts_graph_parsed.py rename to tests/unit/contracts/graph/test_nodes_parsed.py index e655f0775ff..4e8392cb3df 100644 --- a/tests/unit/test_contracts_graph_parsed.py +++ b/tests/unit/contracts/graph/test_nodes_parsed.py @@ -1,7 +1,9 @@ import pickle -import pytest - +from argparse import Namespace from dataclasses import replace + +import pytest +from dbt_semantic_interfaces.type_enums import MetricType from hypothesis import given from hypothesis.strategies import builds, lists @@ -12,6 +14,8 @@ ExposureConfig, ExposureType, FreshnessThreshold, + Hook, + MacroDependsOn, MaturityType, 
Measure, MetricInputMeasure, @@ -19,56 +23,52 @@ Owner, Quoting, RefArgs, - MacroDependsOn, - TestMetadata, SourceConfig, - Time, - Hook, ) +from dbt.artifacts.resources import SourceDefinition as SourceDefinitionResource +from dbt.artifacts.resources import TestMetadata, Time from dbt.artifacts.resources.types import TimePeriod -from dbt.node_types import NodeType, AccessType from dbt.contracts.files import FileHash from dbt.contracts.graph.model_config import ( + EmptySnapshotConfig, + ModelConfig, NodeConfig, SeedConfig, - TestConfig, SnapshotConfig, - EmptySnapshotConfig, - ModelConfig, + TestConfig, ) from dbt.contracts.graph.nodes import ( - ModelNode, DependsOn, + Docs, + Documentation, + Exposure, GenericTestNode, - SnapshotNode, + HookNode, IntermediateSnapshotNode, Macro, - Exposure, Metric, + ModelNode, SeedNode, - Docs, - SourceDefinition, - Documentation, - HookNode, SemanticModel, + SnapshotNode, + SourceDefinition, ) -from dbt.artifacts.resources import SourceDefinition as SourceDefinitionResource -from dbt import flags -from argparse import Namespace - +from dbt.node_types import AccessType, NodeType from dbt_common.dataclass_schema import ValidationError -from dbt_semantic_interfaces.type_enums import MetricType -from .utils import ( +from tests.unit.utils import ( ContractTestCase, - assert_symmetric, + assert_fails_validation, assert_from_dict, + assert_symmetric, compare_dicts, - assert_fails_validation, dict_replace, replace_config, ) -flags.set_from_args(Namespace(SEND_ANONYMOUS_USAGE_STATS=False), None) + +@pytest.fixture +def flags_for_args() -> Namespace: + return Namespace(SEND_ANONYMOUS_USAGE_STATS=False) @pytest.fixture @@ -168,6 +168,7 @@ def base_parsed_model_dict(): "depends_on": {"macros": [], "nodes": []}, "database": "test_db", "description": "", + "primary_key": [], "schema": "test_schema", "alias": "bar", "tags": [], @@ -189,7 +190,6 @@ def base_parsed_model_dict(): "packages": [], "access": "protected", }, - "deferred": False, 
"docs": {"show": True}, "contract": {"enforced": False, "alias_types": True}, "columns": {}, @@ -222,6 +222,7 @@ def basic_parsed_model_object(): metrics=[], depends_on=DependsOn(), description="", + primary_key=[], database="test_db", schema="test_schema", alias="bar", @@ -275,8 +276,8 @@ def complex_parsed_model_dict(): "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.bar"]}, "database": "test_db", - "deferred": True, "description": "My parsed node", + "primary_key": [], "schema": "test_schema", "alias": "bar", "tags": ["tag"], @@ -341,7 +342,6 @@ def complex_parsed_model_object(): sources=[], metrics=[], depends_on=DependsOn(nodes=["model.test.bar"]), - deferred=True, description="My parsed node", database="test_db", schema="test_schema", @@ -523,7 +523,6 @@ def basic_parsed_seed_dict(): "contract": {"enforced": False, "alias_types": True}, "packages": [], }, - "deferred": False, "docs": {"show": True}, "columns": {}, "meta": {}, @@ -551,7 +550,6 @@ def basic_parsed_seed_object(): alias="foo", config=SeedConfig(), # config=SeedConfig(quote_columns=True), - deferred=False, docs=Docs(show=True), columns={}, meta={}, @@ -616,7 +614,6 @@ def complex_parsed_seed_dict(): "contract": {"enforced": False, "alias_types": True}, "packages": [], }, - "deferred": False, "docs": {"show": True}, "columns": { "a": { @@ -661,7 +658,6 @@ def complex_parsed_seed_object(): delimiter=",", persist_docs={"relation": True, "columns": True}, ), - deferred=False, docs=Docs(show=True), columns={"a": ColumnInfo(name="a", description="a column description")}, meta={"foo": 1000}, @@ -804,7 +800,6 @@ def base_parsed_hook_dict(): "metrics": [], "depends_on": {"macros": [], "nodes": []}, "database": "test_db", - "deferred": False, "description": "", "schema": "test_schema", "alias": "bar", @@ -856,7 +851,6 @@ def base_parsed_hook_object(): metrics=[], depends_on=DependsOn(), description="", - deferred=False, database="test_db", schema="test_schema", alias="bar", @@ -885,7 
+879,6 @@ def complex_parsed_hook_dict(): "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.bar"]}, - "deferred": False, "database": "test_db", "description": "My parsed node", "schema": "test_schema", @@ -950,7 +943,6 @@ def complex_parsed_hook_object(): metrics=[], depends_on=DependsOn(nodes=["model.test.bar"]), description="My parsed node", - deferred=False, database="test_db", schema="test_schema", alias="bar", @@ -1044,7 +1036,6 @@ def basic_parsed_schema_test_dict(): "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, - "deferred": False, "database": "test_db", "description": "", "schema": "test_schema", @@ -1124,7 +1115,6 @@ def complex_parsed_schema_test_dict(): "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.bar"]}, "database": "test_db", - "deferred": False, "description": "My parsed node", "schema": "test_schema", "alias": "bar", @@ -1497,7 +1487,6 @@ def basic_timestamp_snapshot_dict(): "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, - "deferred": False, "database": "test_db", "description": "", "schema": "test_schema", @@ -1646,7 +1635,6 @@ def basic_check_snapshot_dict(): "metrics": [], "depends_on": {"macros": [], "nodes": []}, "database": "test_db", - "deferred": False, "description": "", "schema": "test_schema", "alias": "bar", diff --git a/tests/unit/contracts/graph/test_semantic_manifest.py b/tests/unit/contracts/graph/test_semantic_manifest.py index 4eb389ea0f5..cf3121dc9b0 100644 --- a/tests/unit/contracts/graph/test_semantic_manifest.py +++ b/tests/unit/contracts/graph/test_semantic_manifest.py @@ -1,4 +1,5 @@ import pytest + from dbt.contracts.graph.semantic_manifest import SemanticManifest diff --git a/tests/unit/contracts/graph/test_unparsed.py b/tests/unit/contracts/graph/test_unparsed.py index f613a358e7c..90fa2fcf8a7 100644 --- a/tests/unit/contracts/graph/test_unparsed.py +++ b/tests/unit/contracts/graph/test_unparsed.py @@ -1,8 +1,991 @@ 
+import pickle +from datetime import timedelta + import pytest -from dbt.contracts.graph.unparsed import UnparsedColumn, HasColumnTests +from dbt.artifacts.resources import ( + ExposureType, + FreshnessThreshold, + MaturityType, + Owner, + Quoting, + Time, +) +from dbt.artifacts.resources.types import TimePeriod +from dbt.artifacts.schemas.results import FreshnessStatus +from dbt.contracts.graph.unparsed import ( + Docs, + HasColumnTests, + UnparsedColumn, + UnparsedDocumentationFile, + UnparsedExposure, + UnparsedMacro, + UnparsedMetric, + UnparsedMetricInputMeasure, + UnparsedMetricTypeParams, + UnparsedModelUpdate, + UnparsedNode, + UnparsedNodeUpdate, + UnparsedRunHook, + UnparsedSourceDefinition, + UnparsedSourceTableDefinition, + UnparsedVersion, +) from dbt.exceptions import ParsingError +from dbt.node_types import NodeType from dbt.parser.schemas import ParserRef +from tests.unit.utils import ContractTestCase + + +class TestUnparsedMacro(ContractTestCase): + ContractType = UnparsedMacro + + def test_ok(self): + macro_dict = { + "path": "/root/path.sql", + "original_file_path": "/root/path.sql", + "package_name": "test", + "language": "sql", + "raw_code": "{% macro foo() %}select 1 as id{% endmacro %}", + "resource_type": "macro", + } + macro = self.ContractType( + path="/root/path.sql", + original_file_path="/root/path.sql", + package_name="test", + language="sql", + raw_code="{% macro foo() %}select 1 as id{% endmacro %}", + resource_type=NodeType.Macro, + ) + self.assert_symmetric(macro, macro_dict) + pickle.loads(pickle.dumps(macro)) + + def test_invalid_missing_field(self): + macro_dict = { + "path": "/root/path.sql", + "original_file_path": "/root/path.sql", + # 'package_name': 'test', + "language": "sql", + "raw_code": "{% macro foo() %}select 1 as id{% endmacro %}", + "resource_type": "macro", + } + self.assert_fails_validation(macro_dict) + + def test_invalid_extra_field(self): + macro_dict = { + "path": "/root/path.sql", + "original_file_path": 
"/root/path.sql", + "package_name": "test", + "language": "sql", + "raw_code": "{% macro foo() %}select 1 as id{% endmacro %}", + "extra": "extra", + "resource_type": "macro", + } + self.assert_fails_validation(macro_dict) + + +class TestUnparsedNode(ContractTestCase): + ContractType = UnparsedNode + + def test_ok(self): + node_dict = { + "name": "foo", + "resource_type": NodeType.Model, + "path": "/root/x/path.sql", + "original_file_path": "/root/path.sql", + "package_name": "test", + "language": "sql", + "raw_code": 'select * from {{ ref("thing") }}', + } + node = self.ContractType( + package_name="test", + path="/root/x/path.sql", + original_file_path="/root/path.sql", + language="sql", + raw_code='select * from {{ ref("thing") }}', + name="foo", + resource_type=NodeType.Model, + ) + self.assert_symmetric(node, node_dict) + self.assertFalse(node.empty) + + self.assert_fails_validation(node_dict, cls=UnparsedRunHook) + self.assert_fails_validation(node_dict, cls=UnparsedMacro) + pickle.loads(pickle.dumps(node)) + + def test_empty(self): + node_dict = { + "name": "foo", + "resource_type": NodeType.Model, + "path": "/root/x/path.sql", + "original_file_path": "/root/path.sql", + "package_name": "test", + "language": "sql", + "raw_code": " \n", + } + node = UnparsedNode( + package_name="test", + path="/root/x/path.sql", + original_file_path="/root/path.sql", + language="sql", + raw_code=" \n", + name="foo", + resource_type=NodeType.Model, + ) + self.assert_symmetric(node, node_dict) + self.assertTrue(node.empty) + + self.assert_fails_validation(node_dict, cls=UnparsedRunHook) + self.assert_fails_validation(node_dict, cls=UnparsedMacro) + + +class TestUnparsedRunHook(ContractTestCase): + ContractType = UnparsedRunHook + + def test_ok(self): + node_dict = { + "name": "foo", + "resource_type": NodeType.Operation, + "path": "/root/dbt_project.yml", + "original_file_path": "/root/dbt_project.yml", + "package_name": "test", + "language": "sql", + "raw_code": "GRANT select 
on dbt_postgres", + "index": 4, + } + node = self.ContractType( + package_name="test", + path="/root/dbt_project.yml", + original_file_path="/root/dbt_project.yml", + language="sql", + raw_code="GRANT select on dbt_postgres", + name="foo", + resource_type=NodeType.Operation, + index=4, + ) + self.assert_symmetric(node, node_dict) + self.assert_fails_validation(node_dict, cls=UnparsedNode) + pickle.loads(pickle.dumps(node)) + + def test_bad_type(self): + node_dict = { + "name": "foo", + "resource_type": NodeType.Model, # invalid + "path": "/root/dbt_project.yml", + "original_file_path": "/root/dbt_project.yml", + "package_name": "test", + "language": "sql", + "raw_code": "GRANT select on dbt_postgres", + "index": 4, + } + self.assert_fails_validation(node_dict) + + +class TestFreshnessThreshold(ContractTestCase): + ContractType = FreshnessThreshold + + def test_empty(self): + empty = self.ContractType() + self.assert_symmetric(empty, {"error_after": {}, "warn_after": {}}) + self.assertEqual(empty.status(float("Inf")), FreshnessStatus.Pass) + self.assertEqual(empty.status(0), FreshnessStatus.Pass) + + def test_both(self): + threshold = self.ContractType( + warn_after=Time(count=18, period=TimePeriod.hour), + error_after=Time(count=2, period=TimePeriod.day), + ) + dct = { + "error_after": {"count": 2, "period": "day"}, + "warn_after": {"count": 18, "period": "hour"}, + } + self.assert_symmetric(threshold, dct) + + error_seconds = timedelta(days=3).total_seconds() + warn_seconds = timedelta(days=1).total_seconds() + pass_seconds = timedelta(hours=3).total_seconds() + self.assertEqual(threshold.status(error_seconds), FreshnessStatus.Error) + self.assertEqual(threshold.status(warn_seconds), FreshnessStatus.Warn) + self.assertEqual(threshold.status(pass_seconds), FreshnessStatus.Pass) + pickle.loads(pickle.dumps(threshold)) + + def test_merged(self): + t1 = self.ContractType( + warn_after=Time(count=36, period=TimePeriod.hour), + error_after=Time(count=2, 
period=TimePeriod.day), + ) + t2 = self.ContractType( + warn_after=Time(count=18, period=TimePeriod.hour), + ) + threshold = self.ContractType( + warn_after=Time(count=18, period=TimePeriod.hour), + error_after=Time(count=None, period=None), + ) + self.assertEqual(threshold, t1.merged(t2)) + + warn_seconds = timedelta(days=1).total_seconds() + pass_seconds = timedelta(hours=3).total_seconds() + self.assertEqual(threshold.status(warn_seconds), FreshnessStatus.Warn) + self.assertEqual(threshold.status(pass_seconds), FreshnessStatus.Pass) + + +class TestQuoting(ContractTestCase): + ContractType = Quoting + + def test_empty(self): + empty = self.ContractType() + self.assert_symmetric(empty, {}) + + def test_partial(self): + a = self.ContractType(None, True, False) + b = self.ContractType(True, False, None) + self.assert_symmetric(a, {"schema": True, "identifier": False}) + self.assert_symmetric(b, {"database": True, "schema": False}) + + c = a.merged(b) + self.assertEqual(c, self.ContractType(True, False, False)) + self.assert_symmetric(c, {"database": True, "schema": False, "identifier": False}) + pickle.loads(pickle.dumps(c)) + + +class TestUnparsedSourceDefinition(ContractTestCase): + ContractType = UnparsedSourceDefinition + + def test_defaults(self): + minimum = self.ContractType(name="foo") + from_dict = {"name": "foo"} + to_dict = { + "name": "foo", + "description": "", + "freshness": {"error_after": {}, "warn_after": {}}, + "quoting": {}, + "tables": [], + "loader": "", + "meta": {}, + "tags": [], + "config": {}, + } + self.assert_from_dict(minimum, from_dict) + self.assert_to_dict(minimum, to_dict) + + def test_contents(self): + empty = self.ContractType( + name="foo", + description="a description", + quoting=Quoting(database=False), + loader="some_loader", + freshness=FreshnessThreshold(), + tables=[], + meta={}, + ) + dct = { + "name": "foo", + "description": "a description", + "quoting": {"database": False}, + "loader": "some_loader", + "freshness": 
{"error_after": {}, "warn_after": {}}, + "tables": [], + "meta": {}, + "tags": [], + "config": {}, + } + self.assert_symmetric(empty, dct) + + def test_table_defaults(self): + table_1 = UnparsedSourceTableDefinition(name="table1") + table_2 = UnparsedSourceTableDefinition( + name="table2", + description="table 2", + quoting=Quoting(database=True), + ) + source = self.ContractType(name="foo", tables=[table_1, table_2]) + from_dict = { + "name": "foo", + "tables": [ + {"name": "table1"}, + { + "name": "table2", + "description": "table 2", + "quoting": {"database": True}, + }, + ], + } + to_dict = { + "name": "foo", + "description": "", + "config": {}, + "loader": "", + "freshness": {"error_after": {}, "warn_after": {}}, + "quoting": {}, + "meta": {}, + "tables": [ + { + "name": "table1", + "description": "", + "config": {}, + "docs": {"show": True}, + "data_tests": [], + "tests": [], + "columns": [], + "constraints": [], + "quoting": {}, + "freshness": {"error_after": {}, "warn_after": {}}, + "meta": {}, + "tags": [], + }, + { + "name": "table2", + "description": "table 2", + "config": {}, + "docs": {"show": True}, + "data_tests": [], + "tests": [], + "columns": [], + "constraints": [], + "quoting": {"database": True}, + "freshness": {"error_after": {}, "warn_after": {}}, + "meta": {}, + "tags": [], + }, + ], + "tags": [], + } + self.assert_from_dict(source, from_dict) + self.assert_symmetric(source, to_dict) + pickle.loads(pickle.dumps(source)) + + +class TestUnparsedDocumentationFile(ContractTestCase): + ContractType = UnparsedDocumentationFile + + def test_ok(self): + doc = self.ContractType( + package_name="test", + path="/root/docs", + original_file_path="/root/docs/doc.md", + file_contents="blah blah blah", + ) + doc_dict = { + "package_name": "test", + "path": "/root/docs", + "original_file_path": "/root/docs/doc.md", + "file_contents": "blah blah blah", + } + self.assert_symmetric(doc, doc_dict) + self.assertEqual(doc.resource_type, NodeType.Documentation) + 
self.assert_fails_validation(doc_dict, UnparsedNode) + pickle.loads(pickle.dumps(doc)) + + def test_extra_field(self): + self.assert_fails_validation({}) + doc_dict = { + "package_name": "test", + "path": "/root/docs", + "original_file_path": "/root/docs/doc.md", + "file_contents": "blah blah blah", + "resource_type": "docs", + } + self.assert_fails_validation(doc_dict) + + +class TestUnparsedNodeUpdate(ContractTestCase): + ContractType = UnparsedNodeUpdate + + def test_defaults(self): + minimum = self.ContractType( + name="foo", + yaml_key="models", + original_file_path="/some/fake/path", + package_name="test", + ) + from_dict = { + "name": "foo", + "yaml_key": "models", + "original_file_path": "/some/fake/path", + "package_name": "test", + } + to_dict = { + "name": "foo", + "yaml_key": "models", + "original_file_path": "/some/fake/path", + "package_name": "test", + "columns": [], + "description": "", + "docs": {"show": True}, + "data_tests": [], + "tests": [], + "meta": {}, + "config": {}, + "constraints": [], + } + self.assert_from_dict(minimum, from_dict) + self.assert_to_dict(minimum, to_dict) + + def test_contents(self): + update = self.ContractType( + name="foo", + yaml_key="models", + original_file_path="/some/fake/path", + package_name="test", + description="a description", + data_tests=["table_test"], + meta={"key": ["value1", "value2"]}, + columns=[ + UnparsedColumn( + name="x", + description="x description", + meta={"key2": "value3"}, + ), + UnparsedColumn( + name="y", + description="y description", + data_tests=["unique", {"accepted_values": {"values": ["blue", "green"]}}], + meta={}, + tags=["a", "b"], + ), + ], + docs=Docs(show=False), + ) + dct = { + "name": "foo", + "yaml_key": "models", + "original_file_path": "/some/fake/path", + "package_name": "test", + "description": "a description", + "data_tests": ["table_test"], + "tests": [], + "meta": {"key": ["value1", "value2"]}, + "constraints": [], + "columns": [ + { + "name": "x", + "description": "x 
description", + "docs": {"show": True}, + "data_tests": [], + "tests": [], + "meta": {"key2": "value3"}, + "tags": [], + "constraints": [], + }, + { + "name": "y", + "description": "y description", + "docs": {"show": True}, + "data_tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], + "tests": [], + "meta": {}, + "tags": ["a", "b"], + "constraints": [], + }, + ], + "docs": {"show": False}, + "config": {}, + } + self.assert_symmetric(update, dct) + pickle.loads(pickle.dumps(update)) + + def test_bad_test_type(self): + dct = { + "name": "foo", + "yaml_key": "models", + "original_file_path": "/some/fake/path", + "package_name": "test", + "description": "a description", + "data_tests": ["table_test"], + "tests": [], + "meta": {"key": ["value1", "value2"]}, + "columns": [ + { + "name": "x", + "description": "x description", + "docs": {"show": True}, + "data_tests": [], + "tests": [], + "meta": {"key2": "value3"}, + }, + { + "name": "y", + "description": "y description", + "docs": {"show": True}, + "data_tests": [100, {"accepted_values": {"values": ["blue", "green"]}}], + "tests": [], + "meta": {}, + "yaml_key": "models", + "original_file_path": "/some/fake/path", + }, + ], + "docs": {"show": True}, + } + self.assert_fails_validation(dct) + + dct = { + "name": "foo", + "yaml_key": "models", + "original_file_path": "/some/fake/path", + "package_name": "test", + "description": "a description", + "data_tests": ["table_test"], + "tests": [], + "meta": {"key": ["value1", "value2"]}, + "columns": [ + # column missing a name + { + "description": "x description", + "docs": {"show": True}, + "data_tests": [], + "tests": [], + "meta": {"key2": "value3"}, + }, + { + "name": "y", + "description": "y description", + "docs": {"show": True}, + "data_tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], + "tests": [], + "meta": {}, + "yaml_key": "models", + "original_file_path": "/some/fake/path", + }, + ], + "docs": {"show": True}, + } + 
self.assert_fails_validation(dct) + + # missing a name + dct = { + "yaml_key": "models", + "original_file_path": "/some/fake/path", + "package_name": "test", + "description": "a description", + "data_tests": ["table_test"], + "tests": [], + "meta": {"key": ["value1", "value2"]}, + "columns": [ + { + "name": "x", + "description": "x description", + "docs": {"show": True}, + "data_tests": [], + "tests": [], + "meta": {"key2": "value3"}, + }, + { + "name": "y", + "description": "y description", + "docs": {"show": True}, + "data_tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], + "tests": [], + "meta": {}, + "yaml_key": "models", + "original_file_path": "/some/fake/path", + }, + ], + "docs": {"show": True}, + } + self.assert_fails_validation(dct) + + +class TestUnparsedModelUpdate(ContractTestCase): + ContractType = UnparsedModelUpdate + + def test_defaults(self): + minimum = self.ContractType( + name="foo", + yaml_key="models", + original_file_path="/some/fake/path", + package_name="test", + ) + from_dict = { + "name": "foo", + "yaml_key": "models", + "original_file_path": "/some/fake/path", + "package_name": "test", + } + to_dict = { + "name": "foo", + "yaml_key": "models", + "original_file_path": "/some/fake/path", + "package_name": "test", + "columns": [], + "description": "", + "docs": {"show": True}, + "data_tests": [], + "tests": [], + "meta": {}, + "config": {}, + "constraints": [], + "versions": [], + } + self.assert_from_dict(minimum, from_dict) + self.assert_to_dict(minimum, to_dict) + + def test_contents(self): + update = self.ContractType( + name="foo", + yaml_key="models", + original_file_path="/some/fake/path", + package_name="test", + description="a description", + data_tests=["table_test"], + meta={"key": ["value1", "value2"]}, + columns=[ + UnparsedColumn( + name="x", + description="x description", + meta={"key2": "value3"}, + ), + UnparsedColumn( + name="y", + description="y description", + data_tests=["unique", 
{"accepted_values": {"values": ["blue", "green"]}}], + meta={}, + tags=["a", "b"], + ), + ], + docs=Docs(show=False), + versions=[UnparsedVersion(v=2)], + ) + dct = { + "name": "foo", + "yaml_key": "models", + "original_file_path": "/some/fake/path", + "package_name": "test", + "description": "a description", + "data_tests": ["table_test"], + "tests": [], + "meta": {"key": ["value1", "value2"]}, + "constraints": [], + "versions": [ + { + "v": 2, + "description": "", + "columns": [], + "config": {}, + "constraints": [], + "docs": {"show": True}, + } + ], + "columns": [ + { + "name": "x", + "description": "x description", + "docs": {"show": True}, + "data_tests": [], + "tests": [], + "meta": {"key2": "value3"}, + "tags": [], + "constraints": [], + }, + { + "name": "y", + "description": "y description", + "docs": {"show": True}, + "data_tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], + "tests": [], + "meta": {}, + "tags": ["a", "b"], + "constraints": [], + }, + ], + "docs": {"show": False}, + "config": {}, + } + self.assert_symmetric(update, dct) + pickle.loads(pickle.dumps(update)) + + def test_bad_test_type(self): + dct = { + "name": "foo", + "yaml_key": "models", + "original_file_path": "/some/fake/path", + "package_name": "test", + "description": "a description", + "data_tests": ["table_test"], + "tests": [], + "meta": {"key": ["value1", "value2"]}, + "columns": [ + { + "name": "x", + "description": "x description", + "docs": {"show": True}, + "data_tests": [], + "tests": [], + "meta": {"key2": "value3"}, + }, + { + "name": "y", + "description": "y description", + "docs": {"show": True}, + "data_tests": [100, {"accepted_values": {"values": ["blue", "green"]}}], + "tests": [], + "meta": {}, + "yaml_key": "models", + "original_file_path": "/some/fake/path", + }, + ], + "docs": {"show": True}, + } + self.assert_fails_validation(dct) + + dct = { + "name": "foo", + "yaml_key": "models", + "original_file_path": "/some/fake/path", + 
"package_name": "test", + "description": "a description", + "data_tests": ["table_test"], + "tests": [], + "meta": {"key": ["value1", "value2"]}, + "columns": [ + # column missing a name + { + "description": "x description", + "docs": {"show": True}, + "data_tests": [], + "tests": [], + "meta": {"key2": "value3"}, + }, + { + "name": "y", + "description": "y description", + "docs": {"show": True}, + "data_tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], + "tests": [], + "meta": {}, + "yaml_key": "models", + "original_file_path": "/some/fake/path", + }, + ], + "docs": {"show": True}, + } + self.assert_fails_validation(dct) + + # missing a name + dct = { + "yaml_key": "models", + "original_file_path": "/some/fake/path", + "package_name": "test", + "description": "a description", + "data_tests": ["table_test"], + "tests": [], + "meta": {"key": ["value1", "value2"]}, + "columns": [ + { + "name": "x", + "description": "x description", + "docs": {"show": True}, + "data_tests": [], + "tests": [], + "meta": {"key2": "value3"}, + }, + { + "name": "y", + "description": "y description", + "docs": {"show": True}, + "data_tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], + "tests": [], + "meta": {}, + "yaml_key": "models", + "original_file_path": "/some/fake/path", + }, + ], + "docs": {"show": True}, + } + self.assert_fails_validation(dct) + + +class TestUnparsedExposure(ContractTestCase): + ContractType = UnparsedExposure + + def get_ok_dict(self): + return { + "name": "my_exposure", + "type": "dashboard", + "owner": {"name": "example", "email": "name@example.com", "slack": "#channel"}, + "maturity": "medium", + "meta": {"tool": "my_tool"}, + "tags": ["my_department"], + "url": "https://example.com/dashboards/1", + "description": "A exposure", + "config": {}, + "depends_on": [ + 'ref("my_model")', + 'source("raw", "source_table")', + ], + } + + def test_ok(self): + exposure = self.ContractType( + name="my_exposure", + 
type=ExposureType.Dashboard, + owner=Owner(name="example", email="name@example.com", _extra={"slack": "#channel"}), + maturity=MaturityType.Medium, + url="https://example.com/dashboards/1", + description="A exposure", + config={}, + meta={"tool": "my_tool"}, + tags=["my_department"], + depends_on=['ref("my_model")', 'source("raw", "source_table")'], + ) + dct = self.get_ok_dict() + self.assert_symmetric(exposure, dct) + pickle.loads(pickle.dumps(exposure)) + + def test_ok_exposures(self): + for exposure_allowed in ("dashboard", "notebook", "analysis", "ml", "application"): + tst = self.get_ok_dict() + tst["type"] = exposure_allowed + assert self.ContractType.from_dict(tst).type == exposure_allowed + + def test_bad_exposure(self): + # bad exposure: None isn't allowed + for exposure_not_allowed in (None, "not an exposure"): + tst = self.get_ok_dict() + tst["type"] = exposure_not_allowed + self.assert_fails_validation(tst) + + def test_no_exposure(self): + tst = self.get_ok_dict() + del tst["type"] + self.assert_fails_validation(tst) + + def test_ok_maturities(self): + for maturity_allowed in (None, "low", "medium", "high"): + tst = self.get_ok_dict() + tst["maturity"] = maturity_allowed + assert self.ContractType.from_dict(tst).maturity == maturity_allowed + + tst = self.get_ok_dict() + del tst["maturity"] + assert self.ContractType.from_dict(tst).maturity is None + + def test_bad_maturity(self): + tst = self.get_ok_dict() + tst["maturity"] = "invalid maturity" + self.assert_fails_validation(tst) + + def test_bad_owner_missing_things(self): + tst = self.get_ok_dict() + del tst["owner"]["email"] + del tst["owner"]["name"] + self.assert_fails_validation(tst) + + del tst["owner"] + self.assert_fails_validation(tst) + + def test_bad_tags(self): + tst = self.get_ok_dict() + tst["tags"] = [123] + self.assert_fails_validation(tst) + + +class TestUnparsedMetric(ContractTestCase): + ContractType = UnparsedMetric + + def get_ok_dict(self): + return { + "name": "new_customers", 
+ "label": "New Customers", + "description": "New customers", + "type": "simple", + "type_params": { + "measure": { + "name": "customers", + "filter": "is_new = true", + "join_to_timespine": False, + }, + }, + "config": {}, + "tags": [], + "meta": {"is_okr": True}, + } + + def test_ok(self): + metric = self.ContractType( + name="new_customers", + label="New Customers", + description="New customers", + type="simple", + type_params=UnparsedMetricTypeParams( + measure=UnparsedMetricInputMeasure( + name="customers", + filter="is_new = true", + ) + ), + config={}, + meta={"is_okr": True}, + ) + dct = self.get_ok_dict() + self.assert_symmetric(metric, dct) + pickle.loads(pickle.dumps(metric)) + + def test_bad_metric_no_type_params(self): + tst = self.get_ok_dict() + del tst["type_params"] + self.assert_fails_validation(tst) + + def test_bad_tags(self): + tst = self.get_ok_dict() + tst["tags"] = [123] + self.assert_fails_validation(tst) + + +class TestUnparsedVersion(ContractTestCase): + ContractType = UnparsedVersion + + def get_ok_dict(self): + return { + "v": 2, + "defined_in": "test_defined_in", + "description": "A version", + "config": {}, + "constraints": [], + "docs": {"show": False}, + "data_tests": [], + "columns": [], + } + + def test_ok(self): + version = self.ContractType( + v=2, + defined_in="test_defined_in", + description="A version", + config={}, + constraints=[], + docs=Docs(show=False), + data_tests=[], + columns=[], + ) + dct = self.get_ok_dict() + self.assert_symmetric(version, dct) + pickle.loads(pickle.dumps(version)) + + def test_bad_version_no_v(self): + version = self.get_ok_dict() + del version["v"] + self.assert_fails_validation(version) + + +@pytest.mark.parametrize( + "left,right,expected_lt", + [ + # same types + (2, 12, True), + (12, 2, False), + ("a", "b", True), + ("b", "a", False), + # mismatched types - numeric + (2, 12.0, True), + (12.0, 2, False), + (2, "12", True), + ("12", 2, False), + # mismatched types + (1, "test", True), + 
("test", 1, False), + ], +) +def test_unparsed_version_lt(left, right, expected_lt): + assert (UnparsedVersion(left) < UnparsedVersion(right)) == expected_lt def test_column_parse(): diff --git a/tests/unit/test_contracts_project.py b/tests/unit/contracts/test_project.py similarity index 95% rename from tests/unit/test_contracts_project.py rename to tests/unit/contracts/test_project.py index 89713f6860d..37e57a33c12 100644 --- a/tests/unit/test_contracts_project.py +++ b/tests/unit/contracts/test_project.py @@ -1,8 +1,6 @@ -from .utils import ContractTestCase - -from dbt_common.dataclass_schema import ValidationError - from dbt.contracts.project import Project +from dbt_common.dataclass_schema import ValidationError +from tests.unit.utils import ContractTestCase class TestProject(ContractTestCase): diff --git a/tests/unit/deps/__init__.py b/tests/unit/deps/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/unit/test_deps.py b/tests/unit/deps/test_deps.py similarity index 98% rename from tests/unit/test_deps.py rename to tests/unit/deps/test_deps.py index d4edd90b134..339bbbc5d23 100644 --- a/tests/unit/test_deps.py +++ b/tests/unit/deps/test_deps.py @@ -1,30 +1,29 @@ -from copy import deepcopy - import unittest +from argparse import Namespace +from copy import deepcopy from unittest import mock import dbt.deps import dbt.exceptions -from dbt.deps.git import GitUnpinnedPackage -from dbt.deps.local import LocalUnpinnedPackage, LocalPinnedPackage -from dbt.deps.tarball import TarballUnpinnedPackage -from dbt.deps.registry import RegistryUnpinnedPackage from dbt.clients.registry import is_compatible_version +from dbt.config.project import PartialProject from dbt.config.renderer import DbtProjectYamlRenderer -from dbt.deps.resolver import resolve_packages from dbt.contracts.project import ( - LocalPackage, - TarballPackage, GitPackage, + LocalPackage, + PackageConfig, RegistryPackage, + TarballPackage, ) -from dbt.config.project import 
PartialProject -from dbt.contracts.project import PackageConfig -from dbt_common.semver import VersionSpecifier +from dbt.deps.git import GitUnpinnedPackage +from dbt.deps.local import LocalPinnedPackage, LocalUnpinnedPackage +from dbt.deps.registry import RegistryUnpinnedPackage +from dbt.deps.resolver import resolve_packages +from dbt.deps.tarball import TarballUnpinnedPackage +from dbt.flags import set_from_args from dbt.version import get_installed_version from dbt_common.dataclass_schema import ValidationError -from dbt.flags import set_from_args -from argparse import Namespace +from dbt_common.semver import VersionSpecifier set_from_args(Namespace(WARN_ERROR=False), None) @@ -792,6 +791,19 @@ def test_dependency_resolution(self): self.assertEqual(resolved[1].name, "dbt-labs-test/b") self.assertEqual(resolved[1].version, "0.2.1") + def test_private_package_raise_error(self): + package_config = PackageConfig.from_dict( + { + "packages": [ + {"private": "dbt-labs-test/a", "subdirectory": "foo-bar"}, + ], + } + ) + with self.assertRaisesRegex( + dbt.exceptions.DependencyError, "Cannot resolve private package" + ): + resolve_packages(package_config.packages, mock.MagicMock(project_name="test"), {}) + def test_dependency_resolution_allow_prerelease(self): package_config = PackageConfig.from_dict( { diff --git a/tests/unit/events/__init__.py b/tests/unit/events/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/unit/events/test_logging.py b/tests/unit/events/test_logging.py new file mode 100644 index 00000000000..00284ecab78 --- /dev/null +++ b/tests/unit/events/test_logging.py @@ -0,0 +1,38 @@ +from argparse import Namespace + +from pytest_mock import MockerFixture + +from dbt.events.logging import setup_event_logger +from dbt.flags import get_flags, set_from_args +from dbt_common.events.base_types import BaseEvent +from dbt_common.events.event_manager_client import get_event_manager +from dbt_common.events.logger import LoggerConfig 
+from tests.utils import EventCatcher + + +class TestSetupEventLogger: + def test_clears_preexisting_event_manager_state(self) -> None: + manager = get_event_manager() + manager.add_logger(LoggerConfig(name="test_logger")) + manager.callbacks.append(EventCatcher(BaseEvent).catch) + assert len(manager.loggers) == 1 + assert len(manager.callbacks) == 1 + + args = Namespace(log_level="none", log_level_file="none") + set_from_args(args, {}) + + setup_event_logger(get_flags()) + assert len(manager.loggers) == 0 + assert len(manager.callbacks) == 0 + + def test_specify_max_bytes( + self, + mocker: MockerFixture, + ) -> None: + patched_file_handler = mocker.patch("dbt_common.events.logger.RotatingFileHandler") + args = Namespace(log_file_max_bytes=1234567) + set_from_args(args, {}) + setup_event_logger(get_flags()) + patched_file_handler.assert_called_once_with( + filename="logs/dbt.log", encoding="utf8", maxBytes=1234567, backupCount=5 + ) diff --git a/tests/unit/test_proto_events.py b/tests/unit/events/test_types.py similarity index 93% rename from tests/unit/test_proto_events.py rename to tests/unit/events/test_types.py index 906e061a409..51fdf8a2024 100644 --- a/tests/unit/test_proto_events.py +++ b/tests/unit/events/test_types.py @@ -1,25 +1,23 @@ -from dbt.adapters.events.types import ( - RollbackFailed, - PluginLoadError, -) -from dbt_common.events.functions import msg_to_dict, msg_to_json, LOG_VERSION, reset_metadata_vars -from dbt_common.events import types_pb2 -from dbt_common.events.base_types import msg_from_base_event, EventLevel +from google.protobuf.json_format import MessageToDict + +from dbt.adapters.events.types import PluginLoadError, RollbackFailed +from dbt.events import core_types_pb2 from dbt.events.types import ( - MainReportVersion, - MainReportArgs, - MainEncounteredError, LogStartLine, LogTestResult, + MainEncounteredError, + MainReportArgs, + MainReportVersion, ) -from dbt.events import core_types_pb2 from dbt.version import installed -from 
google.protobuf.json_format import MessageToDict -from dbt.flags import set_from_args -from argparse import Namespace - -set_from_args(Namespace(WARN_ERROR=False), None) - +from dbt_common.events import types_pb2 +from dbt_common.events.base_types import EventLevel, msg_from_base_event +from dbt_common.events.functions import ( + LOG_VERSION, + msg_to_dict, + msg_to_json, + reset_metadata_vars, +) info_keys = { "name", diff --git a/tests/unit/fixtures.py b/tests/unit/fixtures.py index 6b4945911d3..cfcc1636b18 100644 --- a/tests/unit/fixtures.py +++ b/tests/unit/fixtures.py @@ -1,15 +1,14 @@ +from dbt.artifacts.resources import Contract, TestConfig, TestMetadata from dbt.contracts.files import FileHash from dbt.contracts.graph.nodes import ( DependsOn, + GenericTestNode, InjectedCTE, - ModelNode, ModelConfig, - GenericTestNode, + ModelNode, ) from dbt.node_types import NodeType -from dbt.artifacts.resources import Contract, TestConfig, TestMetadata - def model_node(): return ModelNode( @@ -26,8 +25,8 @@ def model_node(): sources=[], metrics=[], depends_on=DependsOn(), - deferred=True, description="", + primary_key=[], database="test_db", schema="test_schema", alias="bar", @@ -59,7 +58,6 @@ def generic_test_node(): sources=[], metrics=[], depends_on=DependsOn(), - deferred=False, description="", database="test_db", schema="dbt_test__audit", diff --git a/tests/unit/graph/__init__.py b/tests/unit/graph/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/unit/test_graph_selector_parsing.py b/tests/unit/graph/test_cli.py similarity index 99% rename from tests/unit/test_graph_selector_parsing.py rename to tests/unit/graph/test_cli.py index f330412ffc8..62cabab14f6 100644 --- a/tests/unit/test_graph_selector_parsing.py +++ b/tests/unit/graph/test_cli.py @@ -1,15 +1,16 @@ -from dbt.graph import ( - cli, - SelectionUnion, - SelectionIntersection, - SelectionDifference, - SelectionCriteria, -) -from dbt.graph.selector_methods import MethodName 
import textwrap + import yaml from dbt.contracts.selection import SelectorFile +from dbt.graph import ( + SelectionCriteria, + SelectionDifference, + SelectionIntersection, + SelectionUnion, + cli, +) +from dbt.graph.selector_methods import MethodName def parse_file(txt: str) -> SelectorFile: diff --git a/tests/unit/test_infer_primary_key.py b/tests/unit/graph/test_nodes.py similarity index 59% rename from tests/unit/test_infer_primary_key.py rename to tests/unit/graph/test_nodes.py index 4afa2bf4652..4a62db34b11 100644 --- a/tests/unit/test_infer_primary_key.py +++ b/tests/unit/graph/test_nodes.py @@ -1,20 +1,124 @@ +from copy import deepcopy +from typing import List + +import pytest +from dbt_semantic_interfaces.references import MeasureReference +from dbt_semantic_interfaces.type_enums import ( + AggregationType, + DimensionType, + EntityType, +) + +from dbt.artifacts.resources import Defaults, Dimension, Entity, Measure, TestMetadata +from dbt.artifacts.resources.v1.semantic_model import NodeRelation +from dbt.contracts.graph.model_config import TestConfig +from dbt.contracts.graph.nodes import ColumnInfo, SemanticModel +from dbt.node_types import NodeType from dbt_common.contracts.constraints import ( + ColumnLevelConstraint, ConstraintType, ModelLevelConstraint, - ColumnLevelConstraint, ) +from tests.unit.fixtures import generic_test_node, model_node + + +class TestSemanticModel: + @pytest.fixture(scope="function") + def dimensions(self) -> List[Dimension]: + return [Dimension(name="ds", type=DimensionType)] + + @pytest.fixture(scope="function") + def entities(self) -> List[Entity]: + return [Entity(name="test_entity", type=EntityType.PRIMARY, expr="id")] + + @pytest.fixture(scope="function") + def measures(self) -> List[Measure]: + return [Measure(name="test_measure", agg=AggregationType.COUNT, expr="id")] + + @pytest.fixture(scope="function") + def default_semantic_model( + self, dimensions: List[Dimension], entities: List[Entity], measures: List[Measure] + 
) -> SemanticModel: + return SemanticModel( + name="test_semantic_model", + resource_type=NodeType.SemanticModel, + model="ref('test_model')", + package_name="test", + path="test_path", + original_file_path="test_fixture", + unique_id=f"{NodeType.SemanticModel}.test.test_semantic_model", + fqn=[], + defaults=Defaults(agg_time_dimension="ds"), + dimensions=dimensions, + entities=entities, + measures=measures, + node_relation=NodeRelation( + alias="test_alias", schema_name="test_schema", database="test_database" + ), + ) + + def test_checked_agg_time_dimension_for_measure_via_defaults( + self, + default_semantic_model: SemanticModel, + ): + assert default_semantic_model.defaults.agg_time_dimension is not None + measure = default_semantic_model.measures[0] + measure.agg_time_dimension = None + default_semantic_model.checked_agg_time_dimension_for_measure( + MeasureReference(element_name=measure.name) + ) + + def test_checked_agg_time_dimension_for_measure_via_measure( + self, default_semantic_model: SemanticModel + ): + default_semantic_model.defaults = None + measure = default_semantic_model.measures[0] + measure.agg_time_dimension = default_semantic_model.dimensions[0].name + default_semantic_model.checked_agg_time_dimension_for_measure( + MeasureReference(element_name=measure.name) + ) + + def test_checked_agg_time_dimension_for_measure_exception( + self, default_semantic_model: SemanticModel + ): + default_semantic_model.defaults = None + measure = default_semantic_model.measures[0] + measure.agg_time_dimension = None + + with pytest.raises(AssertionError) as execinfo: + default_semantic_model.checked_agg_time_dimension_for_measure( + MeasureReference(measure.name) + ) -from .fixtures import model_node, generic_test_node + assert ( + f"Aggregation time dimension for measure {measure.name} on semantic model {default_semantic_model.name}" + in str(execinfo.value) + ) -from dbt.contracts.graph.model_config import ( - TestConfig, -) -from dbt.contracts.graph.nodes 
import ( - ColumnInfo, -) -from dbt.artifacts.resources import TestMetadata + def test_semantic_model_same_contents(self, default_semantic_model: SemanticModel): + default_semantic_model_copy = deepcopy(default_semantic_model) + + assert default_semantic_model.same_contents(default_semantic_model_copy) + + def test_semantic_model_same_contents_update_model( + self, default_semantic_model: SemanticModel + ): + default_semantic_model_copy = deepcopy(default_semantic_model) + default_semantic_model_copy.model = "ref('test_another_model')" + + assert not default_semantic_model.same_contents(default_semantic_model_copy) + + def test_semantic_model_same_contents_different_node_relation( + self, + default_semantic_model: SemanticModel, + ): + default_semantic_model_copy = deepcopy(default_semantic_model) + default_semantic_model_copy.node_relation.alias = "test_another_alias" + # Relation should not be consided in same_contents + assert default_semantic_model.same_contents(default_semantic_model_copy) +# Infer primary key def test_no_primary_key(): model = model_node() assert model.infer_primary_key([]) == [] diff --git a/tests/unit/graph/test_queue.py b/tests/unit/graph/test_queue.py new file mode 100644 index 00000000000..50671d03fb2 --- /dev/null +++ b/tests/unit/graph/test_queue.py @@ -0,0 +1,47 @@ +import networkx as nx +import pytest + +from dbt.contracts.graph.manifest import Manifest +from dbt.graph.queue import GraphQueue +from tests.unit.utils import MockNode, make_manifest + + +class TestGraphQueue: + @pytest.fixture(scope="class") + def manifest(self) -> Manifest: + return make_manifest( + nodes=[ + MockNode(package="test_package", name="upstream_model"), + MockNode(package="test_package", name="downstream_model"), + ] + ) + + @pytest.fixture(scope="class") + def graph(self) -> nx.DiGraph: + graph = nx.DiGraph() + graph.add_edge("model.test_package.upstream_model", "model.test_package.downstream_model") + return graph + + def test_init_graph_queue(self, 
manifest, graph): + graph_queue = GraphQueue(graph=graph, manifest=manifest, selected={}) + + assert graph_queue.manifest == manifest + assert graph_queue.graph == graph + assert graph_queue.inner.queue == [(0, "model.test_package.upstream_model")] + assert graph_queue.in_progress == set() + assert graph_queue.queued == {"model.test_package.upstream_model"} + assert graph_queue.lock + + def test_init_graph_queue_preserve_edges_false(self, manifest, graph): + graph_queue = GraphQueue(graph=graph, manifest=manifest, selected={}, preserve_edges=False) + + # when preserve_edges is set to false, dependencies between nodes are no longer tracked in the priority queue + assert list(graph_queue.graph.edges) == [] + assert graph_queue.inner.queue == [ + (0, "model.test_package.downstream_model"), + (0, "model.test_package.upstream_model"), + ] + assert graph_queue.queued == { + "model.test_package.upstream_model", + "model.test_package.downstream_model", + } diff --git a/tests/unit/test_graph.py b/tests/unit/graph/test_selector.py similarity index 62% rename from tests/unit/test_graph.py rename to tests/unit/graph/test_selector.py index b22b5302c14..677fb1c46bd 100644 --- a/tests/unit/test_graph.py +++ b/tests/unit/graph/test_selector.py @@ -1,33 +1,236 @@ import os - +import string import unittest +from argparse import Namespace +from queue import Empty from unittest.mock import MagicMock, patch -from dbt.adapters.postgres import Plugin as PostgresPlugin -from dbt.adapters.factory import reset_adapters, register_adapter +import networkx as nx +import pytest + import dbt.compilation +import dbt.config import dbt.exceptions +import dbt.graph.cli as graph_cli +import dbt.graph.selector as graph_selector import dbt.parser -import dbt.config -import dbt.utils import dbt.parser.manifest +import dbt.utils +import dbt_common.exceptions from dbt import tracking +from dbt.adapters.factory import register_adapter, reset_adapters +from dbt.adapters.postgres import Plugin as 
PostgresPlugin from dbt.cli.flags import convert_config -from dbt.contracts.files import SourceFile, FileHash, FilePath +from dbt.contracts.files import FileHash, FilePath, SourceFile from dbt.contracts.graph.manifest import MacroManifest, ManifestStateCheck from dbt.contracts.project import ProjectFlags +from dbt.events.logging import setup_event_logger from dbt.flags import get_flags, set_from_args from dbt.graph import NodeSelector, parse_difference -from dbt.events.logging import setup_event_logger from dbt.mp_context import get_mp_context -from queue import Empty -from .utils import config_from_parts_or_dicts, generate_name_macros, inject_plugin - -from argparse import Namespace +from dbt.node_types import NodeType +from tests.unit.utils import ( + config_from_parts_or_dicts, + generate_name_macros, + inject_plugin, +) set_from_args(Namespace(WARN_ERROR=False), None) +def _get_graph(): + integer_graph = nx.balanced_tree(2, 2, nx.DiGraph()) + + package_mapping = { + i: "m." + ("X" if i % 2 == 0 else "Y") + "." 
+ letter + for (i, letter) in enumerate(string.ascii_lowercase) + } + + # Edges: [(X.a, Y.b), (X.a, X.c), (Y.b, Y.d), (Y.b, X.e), (X.c, Y.f), (X.c, X.g)] + return graph_selector.Graph(nx.relabel_nodes(integer_graph, package_mapping)) + + +def _get_manifest(graph): + nodes = {} + for unique_id in graph: + fqn = unique_id.split(".") + node = MagicMock( + unique_id=unique_id, + fqn=fqn, + package_name=fqn[0], + tags=[], + resource_type=NodeType.Model, + empty=False, + config=MagicMock(enabled=True), + is_versioned=False, + ) + nodes[unique_id] = node + + nodes["m.X.a"].tags = ["abc"] + nodes["m.Y.b"].tags = ["abc", "bcef"] + nodes["m.X.c"].tags = ["abc", "bcef"] + nodes["m.Y.d"].tags = [] + nodes["m.X.e"].tags = ["efg", "bcef"] + nodes["m.Y.f"].tags = ["efg", "bcef"] + nodes["m.X.g"].tags = ["efg"] + return MagicMock(nodes=nodes) + + +@pytest.fixture +def graph(): + return _get_graph() + + +@pytest.fixture +def manifest(graph): + return _get_manifest(graph) + + +def id_macro(arg): + if isinstance(arg, str): + return arg + try: + return "_".join(arg) + except TypeError: + return arg + + +run_specs = [ + # include by fqn + (["X.a"], [], {"m.X.a"}), + # include by tag + (["tag:abc"], [], {"m.X.a", "m.Y.b", "m.X.c"}), + # exclude by tag + (["*"], ["tag:abc"], {"m.Y.d", "m.X.e", "m.Y.f", "m.X.g"}), + # tag + fqn + (["tag:abc", "a"], [], {"m.X.a", "m.Y.b", "m.X.c"}), + (["tag:abc", "d"], [], {"m.X.a", "m.Y.b", "m.X.c", "m.Y.d"}), + # multiple node selection across packages + (["X.a", "b"], [], {"m.X.a", "m.Y.b"}), + (["X.a+"], ["b"], {"m.X.a", "m.X.c", "m.Y.d", "m.X.e", "m.Y.f", "m.X.g"}), + # children + (["X.c+"], [], {"m.X.c", "m.Y.f", "m.X.g"}), + (["X.a+1"], [], {"m.X.a", "m.Y.b", "m.X.c"}), + (["X.a+"], ["tag:efg"], {"m.X.a", "m.Y.b", "m.X.c", "m.Y.d"}), + # parents + (["+Y.f"], [], {"m.X.c", "m.Y.f", "m.X.a"}), + (["1+Y.f"], [], {"m.X.c", "m.Y.f"}), + # childrens parents + (["@X.c"], [], {"m.X.a", "m.X.c", "m.Y.f", "m.X.g"}), + # multiple selection/exclusion + 
(["tag:abc", "tag:bcef"], [], {"m.X.a", "m.Y.b", "m.X.c", "m.X.e", "m.Y.f"}), + (["tag:abc", "tag:bcef"], ["tag:efg"], {"m.X.a", "m.Y.b", "m.X.c"}), + (["tag:abc", "tag:bcef"], ["tag:efg", "a"], {"m.Y.b", "m.X.c"}), + # intersections + (["a,a"], [], {"m.X.a"}), + (["+c,c+"], [], {"m.X.c"}), + (["a,b"], [], set()), + (["tag:abc,tag:bcef"], [], {"m.Y.b", "m.X.c"}), + (["*,tag:abc,a"], [], {"m.X.a"}), + (["a,tag:abc,*"], [], {"m.X.a"}), + (["tag:abc,tag:bcef"], ["c"], {"m.Y.b"}), + (["tag:bcef,tag:efg"], ["tag:bcef,@b"], {"m.Y.f"}), + (["tag:bcef,tag:efg"], ["tag:bcef,@a"], set()), + (["*,@a,+b"], ["*,tag:abc,tag:bcef"], {"m.X.a"}), + (["tag:bcef,tag:efg", "*,tag:abc"], [], {"m.X.a", "m.Y.b", "m.X.c", "m.X.e", "m.Y.f"}), + (["tag:bcef,tag:efg", "*,tag:abc"], ["e"], {"m.X.a", "m.Y.b", "m.X.c", "m.Y.f"}), + (["tag:bcef,tag:efg", "*,tag:abc"], ["e"], {"m.X.a", "m.Y.b", "m.X.c", "m.Y.f"}), + (["tag:bcef,tag:efg", "*,tag:abc"], ["e", "f"], {"m.X.a", "m.Y.b", "m.X.c"}), + (["tag:bcef,tag:efg", "*,tag:abc"], ["tag:abc,tag:bcef"], {"m.X.a", "m.X.e", "m.Y.f"}), + (["tag:bcef,tag:efg", "*,tag:abc"], ["tag:abc,tag:bcef", "tag:abc,a"], {"m.X.e", "m.Y.f"}), +] + + +@pytest.mark.parametrize("include,exclude,expected", run_specs, ids=id_macro) +def test_run_specs(include, exclude, expected, graph, manifest): + selector = graph_selector.NodeSelector(graph, manifest) + spec = graph_cli.parse_difference(include, exclude) + selected, _ = selector.select_nodes(spec) + + assert selected == expected + + +param_specs = [ + ("a", False, None, False, None, "fqn", "a", False), + ("+a", True, None, False, None, "fqn", "a", False), + ("256+a", True, 256, False, None, "fqn", "a", False), + ("a+", False, None, True, None, "fqn", "a", False), + ("a+256", False, None, True, 256, "fqn", "a", False), + ("+a+", True, None, True, None, "fqn", "a", False), + ("16+a+32", True, 16, True, 32, "fqn", "a", False), + ("@a", False, None, False, None, "fqn", "a", True), + ("a.b", False, None, False, None, "fqn", 
"a.b", False), + ("+a.b", True, None, False, None, "fqn", "a.b", False), + ("256+a.b", True, 256, False, None, "fqn", "a.b", False), + ("a.b+", False, None, True, None, "fqn", "a.b", False), + ("a.b+256", False, None, True, 256, "fqn", "a.b", False), + ("+a.b+", True, None, True, None, "fqn", "a.b", False), + ("16+a.b+32", True, 16, True, 32, "fqn", "a.b", False), + ("@a.b", False, None, False, None, "fqn", "a.b", True), + ("a.b.*", False, None, False, None, "fqn", "a.b.*", False), + ("+a.b.*", True, None, False, None, "fqn", "a.b.*", False), + ("256+a.b.*", True, 256, False, None, "fqn", "a.b.*", False), + ("a.b.*+", False, None, True, None, "fqn", "a.b.*", False), + ("a.b.*+256", False, None, True, 256, "fqn", "a.b.*", False), + ("+a.b.*+", True, None, True, None, "fqn", "a.b.*", False), + ("16+a.b.*+32", True, 16, True, 32, "fqn", "a.b.*", False), + ("@a.b.*", False, None, False, None, "fqn", "a.b.*", True), + ("tag:a", False, None, False, None, "tag", "a", False), + ("+tag:a", True, None, False, None, "tag", "a", False), + ("256+tag:a", True, 256, False, None, "tag", "a", False), + ("tag:a+", False, None, True, None, "tag", "a", False), + ("tag:a+256", False, None, True, 256, "tag", "a", False), + ("+tag:a+", True, None, True, None, "tag", "a", False), + ("16+tag:a+32", True, 16, True, 32, "tag", "a", False), + ("@tag:a", False, None, False, None, "tag", "a", True), + ("source:a", False, None, False, None, "source", "a", False), + ("source:a+", False, None, True, None, "source", "a", False), + ("source:a+1", False, None, True, 1, "source", "a", False), + ("source:a+32", False, None, True, 32, "source", "a", False), + ("@source:a", False, None, False, None, "source", "a", True), +] + + +@pytest.mark.parametrize( + "spec,parents,parents_depth,children,children_depth,filter_type,filter_value,childrens_parents", + param_specs, + ids=id_macro, +) +def test_parse_specs( + spec, + parents, + parents_depth, + children, + children_depth, + filter_type, + filter_value, + 
childrens_parents, +): + parsed = graph_selector.SelectionCriteria.from_single_spec(spec) + assert parsed.parents == parents + assert parsed.parents_depth == parents_depth + assert parsed.children == children + assert parsed.children_depth == children_depth + assert parsed.method == filter_type + assert parsed.value == filter_value + assert parsed.childrens_parents == childrens_parents + + +invalid_specs = [ + "@a+", + "@a.b+", + "@a.b*+", + "@tag:a+", + "@source:a+", +] + + +@pytest.mark.parametrize("invalid", invalid_specs, ids=lambda k: str(k)) +def test_invalid_specs(invalid): + with pytest.raises(dbt_common.exceptions.DbtRuntimeError): + graph_selector.SelectionCriteria.from_single_spec(invalid) + + class GraphTest(unittest.TestCase): def tearDown(self): self.mock_filesystem_search.stop() @@ -342,7 +545,12 @@ def test__dependency_list(self): # dbt.cli.params.indirect_selection # # Doing that is actually a little tricky, so I'm punting it to a new ticket GH #6397 - queue = selector.get_graph_queue(parse_difference(None, None, "eager")) + queue = selector.get_graph_queue( + parse_difference( + None, + None, + ) + ) for model_id in model_ids: self.assertFalse(queue.empty()) diff --git a/tests/unit/test_graph_selector_methods.py b/tests/unit/graph/test_selector_methods.py similarity index 95% rename from tests/unit/test_graph_selector_methods.py rename to tests/unit/graph/test_selector_methods.py index 1a3a16fdafc..28ed9202f86 100644 --- a/tests/unit/test_graph_selector_methods.py +++ b/tests/unit/graph/test_selector_methods.py @@ -1,44 +1,46 @@ import copy -import pytest from dataclasses import replace +from pathlib import Path from unittest import mock -from pathlib import Path +import pytest + +import dbt_common.exceptions from dbt.artifacts.resources import ColumnInfo, FileHash from dbt.contracts.graph.manifest import Manifest - from dbt.contracts.state import PreviousState from dbt.graph.selector_methods import ( - MethodManager, - 
QualifiedNameSelectorMethod, - TagSelectorMethod, - GroupSelectorMethod, AccessSelectorMethod, - SourceSelectorMethod, - PathSelectorMethod, - FileSelectorMethod, - PackageSelectorMethod, ConfigSelectorMethod, - TestNameSelectorMethod, - TestTypeSelectorMethod, - StateSelectorMethod, ExposureSelectorMethod, + FileSelectorMethod, + GroupSelectorMethod, + MethodManager, MetricSelectorMethod, - VersionSelectorMethod, + PackageSelectorMethod, + PathSelectorMethod, + QualifiedNameSelectorMethod, SavedQuerySelectorMethod, SemanticModelSelectorMethod, + SourceSelectorMethod, + StateSelectorMethod, + TagSelectorMethod, + TestNameSelectorMethod, + TestTypeSelectorMethod, + UnitTestSelectorMethod, + VersionSelectorMethod, ) -import dbt_common.exceptions -from .utils import replace_config +from tests.unit.utils import replace_config from tests.unit.utils.manifest import ( - make_model, - make_seed, make_exposure, + make_group, + make_macro, make_metric, + make_model, make_saved_query, + make_seed, make_semantic_model, - make_group, - make_macro, + make_unit_test, ) @@ -343,6 +345,33 @@ def test_select_package(manifest): } +def test_select_package_this(manifest): + new_manifest = copy.deepcopy(manifest) + + # change the package name for all nodes except ones where the unique_id contains "table_model" + for id, node in new_manifest.nodes.items(): + if "table_model" not in id: + node.package_name = "foo" + + for source in new_manifest.sources.values(): + if "table_model" not in source.unique_id: + source.package_name = "foo" + + methods = MethodManager(new_manifest, None) + method = methods.get_method("package", []) + assert isinstance(method, PackageSelectorMethod) + assert method.arguments == [] + + assert search_manifest_using_method(new_manifest, method, "this") == { + "not_null_table_model_id", + "table_model", + "table_model_csv", + "table_model_py", + "unique_table_model_id", + "unit_test_table_model", + } + + def test_select_config_materialized(manifest): methods = 
MethodManager(manifest, None) method = methods.get_method("config", ["materialized"]) @@ -558,6 +587,24 @@ def test_select_saved_query_by_tag(manifest: Manifest) -> None: search_manifest_using_method(manifest, method, "any_tag") +def test_select_unit_test(manifest: Manifest) -> None: + test_model = make_model("test", "my_model", "select 1 as id") + unit_test = make_unit_test("test", "my_unit_test", test_model) + manifest.unit_tests[unit_test.unique_id] = unit_test + methods = MethodManager(manifest, None) + method = methods.get_method("unit_test", []) + + assert isinstance(method, UnitTestSelectorMethod) + assert not search_manifest_using_method(manifest, method, "not_test_unit_test") + assert search_manifest_using_method(manifest, method, "*nit_test") == {unit_test.search_name} + assert search_manifest_using_method(manifest, method, "test.my_unit_test") == { + unit_test.search_name + } + assert search_manifest_using_method(manifest, method, "my_unit_test") == { + unit_test.search_name + } + + @pytest.fixture def previous_state(manifest): writable = copy.deepcopy(manifest).writable_manifest() diff --git a/tests/unit/test_graph_selector_spec.py b/tests/unit/graph/test_selector_spec.py similarity index 73% rename from tests/unit/test_graph_selector_spec.py rename to tests/unit/graph/test_selector_spec.py index 8a19a8b5934..451b107d85c 100644 --- a/tests/unit/test_graph_selector_spec.py +++ b/tests/unit/graph/test_selector_spec.py @@ -1,14 +1,61 @@ +import os +from unittest.mock import patch + import pytest from dbt.exceptions import DbtRuntimeError +from dbt.graph.selector_methods import MethodName from dbt.graph.selector_spec import ( + IndirectSelection, SelectionCriteria, - SelectionIntersection, SelectionDifference, + SelectionIntersection, SelectionUnion, ) -from dbt.graph.selector_methods import MethodName -import os + + +@pytest.mark.parametrize( + "indirect_selection_value,expected_value", + [(v, v) for v in IndirectSelection], +) +def 
test_selection_criteria_default_indirect_value(indirect_selection_value, expected_value): + # Check selection criteria with indirect selection value would follow the resolved value in flags + # if indirect selection is not specified in the selection criteria. + with patch("dbt.graph.selector_spec.get_flags") as patched_get_flags: + patched_get_flags.return_value.INDIRECT_SELECTION = indirect_selection_value + patched_get_flags.INDIRECT_SELECTION = indirect_selection_value + selection_dict_without_indirect_selection_specified = { + "method": "path", + "value": "models/marts/orders.sql", + "children": False, + "parents": False, + } + selection_criteria_without_indirect_selection_specified = ( + SelectionCriteria.selection_criteria_from_dict( + selection_dict_without_indirect_selection_specified, + selection_dict_without_indirect_selection_specified, + ) + ) + assert ( + selection_criteria_without_indirect_selection_specified.indirect_selection + == expected_value + ) + selection_dict_without_indirect_selection_specified = { + "method": "path", + "value": "models/marts/orders.sql", + "children": False, + "parents": False, + "indirect_selection": "buildable", + } + selection_criteria_with_indirect_selection_specified = ( + SelectionCriteria.selection_criteria_from_dict( + selection_dict_without_indirect_selection_specified, + selection_dict_without_indirect_selection_specified, + ) + ) + assert ( + selection_criteria_with_indirect_selection_specified.indirect_selection == "buildable" + ) def test_raw_parse_simple(): diff --git a/tests/unit/mock_adapter.py b/tests/unit/mock_adapter.py index 8858542619b..c1f8a062668 100644 --- a/tests/unit/mock_adapter.py +++ b/tests/unit/mock_adapter.py @@ -1,7 +1,7 @@ +from contextlib import contextmanager from unittest import mock from dbt.adapters.base import BaseAdapter -from contextlib import contextmanager def adapter_factory(): diff --git a/tests/unit/parser/__init__.py b/tests/unit/parser/__init__.py new file mode 100644 index 
00000000000..e69de29bb2d diff --git a/tests/unit/test_docs_blocks.py b/tests/unit/parser/test_docs.py similarity index 98% rename from tests/unit/test_docs_blocks.py rename to tests/unit/parser/test_docs.py index cdf9933ce50..2eb5d12c383 100644 --- a/tests/unit/test_docs_blocks.py +++ b/tests/unit/parser/test_docs.py @@ -1,17 +1,15 @@ import os import unittest +from argparse import Namespace -from dbt.contracts.files import SourceFile, FileHash, FilePath +from dbt.contracts.files import FileHash, FilePath, SourceFile from dbt.contracts.graph.manifest import Manifest from dbt.contracts.graph.nodes import Documentation +from dbt.flags import set_from_args from dbt.node_types import NodeType from dbt.parser import docs from dbt.parser.search import FileBlock - -from .utils import config_from_parts_or_dicts - -from dbt.flags import set_from_args -from argparse import Namespace +from tests.unit.utils import config_from_parts_or_dicts set_from_args(Namespace(WARN_ERROR=False), None) diff --git a/tests/unit/parser/test_manifest.py b/tests/unit/parser/test_manifest.py index 6a643e444f3..1f10ee04f25 100644 --- a/tests/unit/parser/test_manifest.py +++ b/tests/unit/parser/test_manifest.py @@ -1,26 +1,15 @@ -import pytest -from unittest.mock import patch, MagicMock from argparse import Namespace +from unittest.mock import MagicMock, patch +import pytest +from pytest_mock import MockerFixture -from dbt.contracts.graph.manifest import Manifest -from dbt.parser.manifest import ManifestLoader from dbt.config import RuntimeConfig +from dbt.contracts.graph.manifest import Manifest from dbt.flags import set_from_args - - -@pytest.fixture -def mock_project(): - mock_project = MagicMock(RuntimeConfig) - mock_project.cli_vars = {} - mock_project.args = MagicMock() - mock_project.args.profile = "test" - mock_project.args.target = "test" - mock_project.project_env_vars = {} - mock_project.profile_env_vars = {} - mock_project.project_target_path = "mock_target_path" - 
mock_project.credentials = MagicMock() - return mock_project +from dbt.parser.manifest import ManifestLoader +from dbt.parser.read_files import FileDiff +from dbt.tracking import User class TestPartialParse: @@ -31,7 +20,6 @@ def test_partial_parse_file_path(self, patched_open, patched_os_exist, patched_s mock_project = MagicMock(RuntimeConfig) mock_project.project_target_path = "mock_target_path" patched_os_exist.return_value = True - set_from_args(Namespace(), {}) ManifestLoader(mock_project, {}) # by default we use the project_target_path patched_open.assert_called_with("mock_target_path/partial_parse.msgpack", "rb") @@ -44,7 +32,6 @@ def test_profile_hash_change(self, mock_project): # This test validate that the profile_hash is updated when the connection keys change profile_hash = "750bc99c1d64ca518536ead26b28465a224be5ffc918bf2a490102faa5a1bcf5" mock_project.credentials.connection_info.return_value = "test" - set_from_args(Namespace(), {}) manifest = ManifestLoader(mock_project, {}) assert manifest.manifest.state_check.profile_hash.checksum == profile_hash mock_project.credentials.connection_info.return_value = "test1" @@ -78,7 +65,6 @@ def test_partial_parse_safe_update_project_parser_files_partially( mock_saved_manifest.files = {} patched_read_manifest_for_partial_parse.return_value = mock_saved_manifest - set_from_args(Namespace(), {}) loader = ManifestLoader(mock_project, {}) loader.safe_update_project_parser_files_partially({}) @@ -91,3 +77,83 @@ def test_partial_parse_safe_update_project_parser_files_partially( assert "full_reparse_reason" in exc_info assert "KeyError: 'Whoopsie!'" == exc_info["exception"] assert isinstance(exc_info["code"], str) or isinstance(exc_info["code"], type(None)) + + +class TestGetFullManifest: + @pytest.fixture + def set_required_mocks( + self, mocker: MockerFixture, manifest: Manifest, mock_adapter: MagicMock + ): + mocker.patch("dbt.parser.manifest.get_adapter").return_value = mock_adapter + 
mocker.patch("dbt.parser.manifest.ManifestLoader.load").return_value = manifest + mocker.patch("dbt.parser.manifest._check_manifest").return_value = None + mocker.patch( + "dbt.parser.manifest.ManifestLoader.save_macros_to_adapter" + ).return_value = None + mocker.patch("dbt.tracking.active_user").return_value = User(None) + + def test_write_perf_info( + self, + mock_project: MagicMock, + mocker: MockerFixture, + set_required_mocks, + ) -> None: + write_perf_info = mocker.patch("dbt.parser.manifest.ManifestLoader.write_perf_info") + + ManifestLoader.get_full_manifest( + config=mock_project, + # write_perf_info=False let it default instead + ) + assert not write_perf_info.called + + ManifestLoader.get_full_manifest(config=mock_project, write_perf_info=False) + assert not write_perf_info.called + + ManifestLoader.get_full_manifest(config=mock_project, write_perf_info=True) + assert write_perf_info.called + + def test_reset( + self, + mock_project: MagicMock, + mock_adapter: MagicMock, + set_required_mocks, + ) -> None: + + ManifestLoader.get_full_manifest( + config=mock_project, + # reset=False let it default instead + ) + assert not mock_project.clear_dependencies.called + assert not mock_adapter.clear_macro_resolver.called + + ManifestLoader.get_full_manifest(config=mock_project, reset=False) + assert not mock_project.clear_dependencies.called + assert not mock_adapter.clear_macro_resolver.called + + ManifestLoader.get_full_manifest(config=mock_project, reset=True) + assert mock_project.clear_dependencies.called + assert mock_adapter.clear_macro_resolver.called + + def test_partial_parse_file_diff_flag( + self, + mock_project: MagicMock, + mocker: MockerFixture, + set_required_mocks, + ) -> None: + + # FileDiff.from_dict is only called if PARTIAL_PARSE_FILE_DIFF == False + # So we can track this function call to check if setting PARTIAL_PARSE_FILE_DIFF + # works appropriately + mock_file_diff = mocker.patch("dbt.parser.read_files.FileDiff.from_dict") + 
mock_file_diff.return_value = FileDiff([], [], []) + + ManifestLoader.get_full_manifest(config=mock_project) + assert not mock_file_diff.called + + set_from_args(Namespace(PARTIAL_PARSE_FILE_DIFF=True), {}) + ManifestLoader.get_full_manifest(config=mock_project) + assert not mock_file_diff.called + + set_from_args(Namespace(PARTIAL_PARSE_FILE_DIFF=False), {}) + ManifestLoader.get_full_manifest(config=mock_project) + assert mock_file_diff.called diff --git a/tests/unit/test_parser.py b/tests/unit/parser/test_parser.py similarity index 98% rename from tests/unit/test_parser.py rename to tests/unit/parser/test_parser.py index e1eb643eb49..42398f48f39 100644 --- a/tests/unit/test_parser.py +++ b/tests/unit/parser/test_parser.py @@ -1,5 +1,6 @@ import os import unittest +from argparse import Namespace from copy import deepcopy from unittest import mock @@ -8,53 +9,56 @@ import dbt.flags import dbt.parser from dbt import tracking -from dbt.artifacts.resources import RefArgs +from dbt.artifacts.resources import ModelConfig, RefArgs from dbt.context.context_config import ContextConfig -from dbt.contracts.files import SourceFile, FileHash, FilePath, SchemaSourceFile +from dbt.contracts.files import FileHash, FilePath, SchemaSourceFile, SourceFile from dbt.contracts.graph.manifest import Manifest -from dbt.contracts.graph.model_config import NodeConfig, TestConfig, SnapshotConfig -from dbt.artifacts.resources import ModelConfig +from dbt.contracts.graph.model_config import NodeConfig, SnapshotConfig, TestConfig from dbt.contracts.graph.nodes import ( - ModelNode, - Macro, + AnalysisNode, DependsOn, + Macro, + ModelNode, SingularTestNode, SnapshotNode, - AnalysisNode, UnpatchedSourceDefinition, ) -from dbt.exceptions import CompilationError, ParsingError +from dbt.exceptions import CompilationError, ParsingError, SchemaConfigError +from dbt.flags import set_from_args from dbt.node_types import NodeType from dbt.parser import ( - ModelParser, - MacroParser, - 
SingularTestParser, + AnalysisParser, GenericTestParser, + MacroParser, + ModelParser, SchemaParser, + SingularTestParser, SnapshotParser, - AnalysisParser, ) from dbt.parser.common import YamlBlock from dbt.parser.models import ( _get_config_call_dict, - _shift_sources, _get_exp_sample_result, - _get_stable_sample_result, _get_sample_result, + _get_stable_sample_result, + _shift_sources, ) from dbt.parser.schemas import ( - TestablePatchParser, - ModelPatchParser, - SourceParser, AnalysisPatchParser, MacroPatchParser, + ModelPatchParser, + SourceParser, + TestablePatchParser, yaml_from_file, ) from dbt.parser.search import FileBlock from dbt.parser.sources import SourcePatcher -from .utils import config_from_parts_or_dicts, normalize, generate_name_macros, MockNode -from dbt.flags import set_from_args -from argparse import Namespace +from tests.unit.utils import ( + MockNode, + config_from_parts_or_dicts, + generate_name_macros, + normalize, +) set_from_args(Namespace(WARN_ERROR=False), None) @@ -273,6 +277,22 @@ def assertEqualNodes(node_one, node_two): arg: 100 """ +SINGLE_TABLE_MODEL_TESTS_WRONG_SEVERITY = """ +models: + - name: my_model + description: A description of my model + columns: + - name: color + description: The color value + data_tests: + - not_null: + severity: WARNING + - accepted_values: + values: ['red', 'blue', 'green'] + - foreign_package.test_case: + arg: 100 +""" + MULTIPLE_TABLE_VERSIONED_MODEL_TESTS = """ models: @@ -577,6 +597,14 @@ def test__read_basic_model_tests(self): self.assertEqual(len(list(self.parser.manifest.sources)), 0) self.assertEqual(len(list(self.parser.manifest.nodes)), 4) + def test__read_basic_model_tests_wrong_severity(self): + block = self.yaml_block_for(SINGLE_TABLE_MODEL_TESTS_WRONG_SEVERITY, "test_one.yml") + dct = yaml_from_file(block.file) + with self.assertRaisesRegex( + SchemaConfigError, "Severity must be either 'warn' or 'error'. 
Got 'WARNING'" + ): + self.parser.parse_file(block, dct) + def test__parse_basic_model_tests(self): block = self.file_block_for(SINGLE_TABLE_MODEL_TESTS, "test_one.yml") self.parser.manifest.files[block.file.file_id] = block.file diff --git a/tests/unit/parser/test_partial.py b/tests/unit/parser/test_partial.py new file mode 100644 index 00000000000..40f2e6e8022 --- /dev/null +++ b/tests/unit/parser/test_partial.py @@ -0,0 +1,211 @@ +import time +from copy import deepcopy +from typing import Dict, List + +import pytest + +from dbt.contracts.files import ( + BaseSourceFile, + FileHash, + FilePath, + ParseFileType, + SchemaSourceFile, + SourceFile, +) +from dbt.node_types import NodeType +from dbt.parser.partial import PartialParsing +from dbt.tests.util import safe_set_invocation_context +from tests.unit.utils import normalize +from tests.unit.utils.manifest import make_generic_test, make_model + +PROJECT_NAME = "my_test" + + +@pytest.fixture +def files() -> Dict[str, BaseSourceFile]: + project_root = "/users/root" + sql_model_file = SourceFile( + path=FilePath( + project_root=project_root, + searched_path="models", + relative_path="my_model.sql", + modification_time=time.time(), + ), + checksum=FileHash.from_contents("abcdef"), + project_name=PROJECT_NAME, + parse_file_type=ParseFileType.Model, + nodes=["model.my_test.my_model"], + env_vars=[], + ) + sql_model_file_untouched = SourceFile( + path=FilePath( + project_root=project_root, + searched_path="models", + relative_path="my_model_untouched.sql", + modification_time=time.time(), + ), + checksum=FileHash.from_contents("abcdef"), + project_name=PROJECT_NAME, + parse_file_type=ParseFileType.Model, + nodes=["model.my_test.my_model_untouched"], + env_vars=[], + ) + + python_model_file = SourceFile( + path=FilePath( + project_root=project_root, + searched_path="models", + relative_path="python_model.py", + modification_time=time.time(), + ), + checksum=FileHash.from_contents("lalala"), + project_name=PROJECT_NAME, + 
parse_file_type=ParseFileType.Model, + nodes=["model.my_test.python_model"], + env_vars=[], + ) + python_model_file_untouched = SourceFile( + path=FilePath( + project_root=project_root, + searched_path="models", + relative_path="python_model_untouched.py", + modification_time=time.time(), + ), + checksum=FileHash.from_contents("lalala"), + project_name=PROJECT_NAME, + parse_file_type=ParseFileType.Model, + nodes=["model.my_test.python_model_untouched"], + env_vars=[], + ) + schema_file = SchemaSourceFile( + path=FilePath( + project_root=project_root, + searched_path="models", + relative_path="schema.yml", + modification_time=time.time(), + ), + checksum=FileHash.from_contents("ghijkl"), + project_name=PROJECT_NAME, + parse_file_type=ParseFileType.Schema, + dfy={ + "version": 2, + "models": [ + {"name": "my_model", "description": "Test model"}, + {"name": "python_model", "description": "python"}, + {"name": "not_null", "model": "test.my_test.test_my_model"}, + ], + }, + ndp=["model.my_test.my_model"], + env_vars={}, + data_tests={"models": {"not_null": {"test.my_test.test_my_model": []}}}, + ) + return { + schema_file.file_id: schema_file, + sql_model_file.file_id: sql_model_file, + sql_model_file_untouched.file_id: sql_model_file_untouched, + python_model_file.file_id: python_model_file, + python_model_file_untouched.file_id: python_model_file_untouched, + } + + +@pytest.fixture +def nodes() -> List[NodeType]: + patch_path = "my_test://" + normalize("models/schema.yml") + my_model = make_model(PROJECT_NAME, "my_model", "", patch_path=patch_path) + return [ + my_model, + make_model(PROJECT_NAME, "my_model_untouched", "", patch_path=patch_path), + make_model(PROJECT_NAME, "python_model", "", language="python", patch_path=patch_path), + make_model( + PROJECT_NAME, "python_model_untouched", "", language="python", patch_path=patch_path + ), + make_generic_test(PROJECT_NAME, "test", my_model, {}), + ] + + +@pytest.fixture +def partial_parsing(manifest, files): + 
safe_set_invocation_context() + return PartialParsing(manifest, deepcopy(files)) + + +def test_simple(partial_parsing, files, nodes): + # Nothing has changed + assert partial_parsing is not None + assert partial_parsing.skip_parsing() is True + + # Change a model file + sql_model_file_id = "my_test://" + normalize("models/my_model.sql") + partial_parsing.new_files[sql_model_file_id].checksum = FileHash.from_contents("xyzabc") + + python_model_file_id = "my_test://" + normalize("models/python_model.py") + partial_parsing.new_files[python_model_file_id].checksum = FileHash.from_contents("ohohoh") + + partial_parsing.build_file_diff() + assert partial_parsing.skip_parsing() is False + pp_files = partial_parsing.get_parsing_files() + pp_files["my_test"]["ModelParser"] = set(pp_files["my_test"]["ModelParser"]) + # models has 'patch_path' so we expect to see a SchemaParser file listed + schema_file_id = "my_test://" + normalize("models/schema.yml") + expected_pp_files = { + "my_test": { + "ModelParser": set([sql_model_file_id, python_model_file_id]), + "SchemaParser": [schema_file_id], + } + } + assert pp_files == expected_pp_files + schema_file = files[schema_file_id] + schema_file_model_names = set([model["name"] for model in schema_file.pp_dict["models"]]) + expected_model_names = set(["python_model", "my_model"]) + assert schema_file_model_names == expected_model_names + schema_file_model_descriptions = set( + [model["description"] for model in schema_file.pp_dict["models"]] + ) + expected_model_descriptions = set(["Test model", "python"]) + assert schema_file_model_descriptions == expected_model_descriptions + + +def test_schedule_nodes_for_parsing_basic(partial_parsing, nodes): + assert partial_parsing.file_diff["deleted"] == [] + assert partial_parsing.project_parser_files == {} + partial_parsing.schedule_nodes_for_parsing([nodes[0].unique_id]) + assert partial_parsing.project_parser_files == { + "my_test": { + "ModelParser": ["my_test://models/my_model.sql"], + 
"SchemaParser": ["my_test://models/schema.yml"], + } + } + + +def test_schedule_macro_nodes_for_parsing_basic(partial_parsing): + # XXX it seems kind of confusing what exactly this function does. + # Whoever Changes this function please add more comment. + + # this rely on the dfy and data_tests fields in schema node to add schema file to reparse + partial_parsing.schedule_macro_nodes_for_parsing(["test.my_test.test_my_model"]) + assert partial_parsing.project_parser_files == { + "my_test": {"SchemaParser": ["my_test://models/schema.yml"]} + } + + +class TestFileDiff: + @pytest.fixture + def partial_parsing(self, manifest, files): + safe_set_invocation_context() + saved_files = deepcopy(files) + saved_files[ + "my_test://models/python_model_untouched.py" + ].checksum = FileHash.from_contents("something new") + return PartialParsing(manifest, saved_files) + + def test_build_file_diff_basic(self, partial_parsing): + partial_parsing.build_file_diff() + assert set(partial_parsing.file_diff["unchanged"]) == { + "my_test://models/my_model_untouched.sql", + "my_test://models/my_model.sql", + "my_test://models/schema.yml", + "my_test://models/python_model.py", + } + assert partial_parsing.file_diff["changed"] == [ + "my_test://models/python_model_untouched.py" + ] diff --git a/tests/unit/test_yaml_renderer.py b/tests/unit/parser/test_schema_renderer.py similarity index 100% rename from tests/unit/test_yaml_renderer.py rename to tests/unit/parser/test_schema_renderer.py diff --git a/tests/unit/test_unit_test_parser.py b/tests/unit/parser/test_unit_tests.py similarity index 95% rename from tests/unit/test_unit_test_parser.py rename to tests/unit/parser/test_unit_tests.py index 9f6fb0ced55..e8725bed718 100644 --- a/tests/unit/test_unit_test_parser.py +++ b/tests/unit/parser/test_unit_tests.py @@ -1,14 +1,12 @@ -from dbt.contracts.graph.nodes import UnitTestDefinition, NodeType -from dbt.artifacts.resources import UnitTestConfig, DependsOn -from dbt.parser import SchemaParser 
-from dbt.parser.unit_tests import UnitTestParser - -from .utils import MockNode -from .test_parser import SchemaParserTest, assertEqualNodes - from unittest import mock -from dbt.contracts.graph.unparsed import UnitTestOutputFixture +from dbt.artifacts.resources import DependsOn, UnitTestConfig +from dbt.contracts.graph.nodes import NodeType, UnitTestDefinition +from dbt.contracts.graph.unparsed import UnitTestOutputFixture +from dbt.parser import SchemaParser +from dbt.parser.unit_tests import UnitTestParser +from tests.unit.parser.test_parser import SchemaParserTest, assertEqualNodes +from tests.unit.utils import MockNode UNIT_TEST_MODEL_NOT_FOUND_SOURCE = """ unit_tests: diff --git a/tests/unit/test_plugin_manager.py b/tests/unit/plugins/test_manager.py similarity index 95% rename from tests/unit/test_plugin_manager.py rename to tests/unit/plugins/test_manager.py index bf25d810729..b67677879be 100644 --- a/tests/unit/test_plugin_manager.py +++ b/tests/unit/plugins/test_manager.py @@ -1,11 +1,12 @@ -import pytest from unittest import mock +import pytest + from dbt.exceptions import DbtRuntimeError -from dbt.plugins import PluginManager, dbtPlugin, dbt_hook -from dbt.plugins.manifest import PluginNodes, ModelNodeArgs -from dbt.plugins.contracts import PluginArtifacts, PluginArtifact +from dbt.plugins import PluginManager, dbt_hook, dbtPlugin +from dbt.plugins.contracts import PluginArtifact, PluginArtifacts from dbt.plugins.exceptions import dbtPluginError +from dbt.plugins.manifest import ModelNodeArgs, PluginNodes class ExceptionInitializePlugin(dbtPlugin): diff --git a/tests/unit/task/__init__.py b/tests/unit/task/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/unit/task/docs/__init__.py b/tests/unit/task/docs/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/unit/task/docs/test_serve.py b/tests/unit/task/docs/test_serve.py new file mode 100644 index 00000000000..cedb234a205 --- /dev/null +++ 
b/tests/unit/task/docs/test_serve.py @@ -0,0 +1,23 @@ +from http.server import SimpleHTTPRequestHandler +from unittest.mock import MagicMock, patch + +import pytest + +from dbt.task.docs.serve import ServeTask + + +@pytest.fixture +def serve_task(): + # Set up + task = ServeTask(config=MagicMock(), args=MagicMock()) + task.config.project_target_path = "." + task.args.port = 8000 + return task + + +def test_serve_bind_to_127(serve_task): + serve_task.args.browser = False + with patch("dbt.task.docs.serve.socketserver.TCPServer") as patched_TCPServer: + patched_TCPServer.return_value = MagicMock() + serve_task.run() + patched_TCPServer.assert_called_once_with(("127.0.0.1", 8000), SimpleHTTPRequestHandler) diff --git a/tests/unit/task/test_base.py b/tests/unit/task/test_base.py index b8f84fffa5e..c28d59f0df7 100644 --- a/tests/unit/task/test_base.py +++ b/tests/unit/task/test_base.py @@ -1,8 +1,8 @@ import os -from dbt.task.base import BaseRunner, ConfiguredTask -from dbt.contracts.graph.nodes import SourceDefinition -import dbt_common.exceptions +import dbt_common.exceptions +from dbt.contracts.graph.nodes import SourceDefinition +from dbt.task.base import BaseRunner, ConfiguredTask from tests.unit.config import BaseConfigTest INITIAL_ROOT = os.getcwd() diff --git a/tests/unit/test_docs_generate.py b/tests/unit/task/test_docs.py similarity index 100% rename from tests/unit/test_docs_generate.py rename to tests/unit/task/test_docs.py index 25cb2655a72..171b081c781 100644 --- a/tests/unit/test_docs_generate.py +++ b/tests/unit/task/test_docs.py @@ -1,6 +1,6 @@ +import unittest from decimal import Decimal from unittest import mock -import unittest from dbt.task.docs import generate diff --git a/tests/unit/task/test_freshness.py b/tests/unit/task/test_freshness.py index 05c00df75da..4816ae98f05 100644 --- a/tests/unit/task/test_freshness.py +++ b/tests/unit/task/test_freshness.py @@ -1,8 +1,9 @@ import datetime -import pytest from unittest import mock -from 
dbt.task.freshness import FreshnessTask, FreshnessResponse +import pytest + +from dbt.task.freshness import FreshnessResponse, FreshnessTask class TestFreshnessTaskMetadataCache: diff --git a/tests/unit/task/test_list.py b/tests/unit/task/test_list.py new file mode 100644 index 00000000000..da701fe2fcf --- /dev/null +++ b/tests/unit/task/test_list.py @@ -0,0 +1,22 @@ +from argparse import Namespace +from unittest.mock import patch + +from dbt.flags import get_flags, set_from_args +from dbt.task.list import ListTask +from dbt_common.events.types import PrintEvent + + +def test_list_output_results(): + set_from_args(Namespace(models=None), {}) + task = ListTask(get_flags(), None, None) + results = ["node1", "node2", "node3"] + expected_node_results = ["node1", "node2", "node3"] + + with patch("dbt.task.list.fire_event") as mock_fire_event: + node_results = task.output_results(results) + + assert node_results == expected_node_results + # assert called with PrintEvent type object and message 'node1', 'node2', 'node3' + for call_args in mock_fire_event.call_args_list: + assert isinstance(call_args[0][0], PrintEvent) + assert call_args[0][0].msg in expected_node_results diff --git a/tests/unit/test_retry_commands.py b/tests/unit/task/test_retry.py similarity index 89% rename from tests/unit/test_retry_commands.py rename to tests/unit/task/test_retry.py index 3eb151cb6a3..0e0c8898b4e 100644 --- a/tests/unit/test_retry_commands.py +++ b/tests/unit/task/test_retry.py @@ -1,5 +1,5 @@ from dbt.cli.types import Command -from dbt.task.retry import TASK_DICT, CMD_DICT +from dbt.task.retry import CMD_DICT, TASK_DICT EXCLUDED_COMMANDS = { "clean", diff --git a/tests/unit/task/test_runnable.py b/tests/unit/task/test_runnable.py index 50fda4f2c82..17e09830892 100644 --- a/tests/unit/task/test_runnable.py +++ b/tests/unit/task/test_runnable.py @@ -1,10 +1,14 @@ -import pytest - from dataclasses import dataclass -from dbt.task.runnable import GraphRunnableTask -from typing import 
AbstractSet, Any, Dict, Optional +from typing import AbstractSet, Any, Dict, List, Optional, Tuple + +import networkx as nx +import pytest +from dbt.artifacts.resources.types import NodeType +from dbt.graph import Graph, ResourceTypeSelector +from dbt.task.runnable import GraphRunnableMode, GraphRunnableTask from dbt.tests.util import safe_set_invocation_context +from tests.unit.utils import MockNode, make_manifest @dataclass @@ -14,6 +18,9 @@ class MockArgs: state: Optional[Dict[str, Any]] = None defer_state: Optional[Dict[str, Any]] = None write_json: bool = False + selector: Optional[str] = None + select: Tuple[str] = () + exclude: Tuple[str] = () @dataclass @@ -23,12 +30,28 @@ class MockConfig: threads: int = 1 target_name: str = "mock_config_target_name" + def get_default_selector_name(self): + return None + class MockRunnableTask(GraphRunnableTask): - def __init__(self, exception_class: Exception = Exception): + def __init__( + self, + exception_class: Exception = Exception, + nodes: Optional[List[MockNode]] = None, + edges: Optional[List[Tuple[str, str]]] = None, + ): + nodes = nodes or [] + edges = edges or [] + self.forced_exception_class = exception_class self.did_cancel: bool = False super().__init__(args=MockArgs(), config=MockConfig(), manifest=None) + self.manifest = make_manifest(nodes=nodes) + digraph = nx.DiGraph() + for edge in edges: + digraph.add_edge(edge[0], edge[1]) + self.graph = Graph(digraph) def run_queue(self, pool): """Override `run_queue` to raise a system exit""" @@ -40,13 +63,25 @@ def _cancel_connections(self, pool): def get_node_selector(self): """This is an `abstract_method` on `GraphRunnableTask`, thus we must implement it""" - return None + selector = ResourceTypeSelector( + graph=self.graph, + manifest=self.manifest, + previous_state=self.previous_state, + resource_types=[NodeType.Model], + include_empty_nodes=True, + ) + return selector def defer_to_manifest(self, adapter, selected_uids: AbstractSet[str]): """This is an 
`abstract_method` on `GraphRunnableTask`, thus we must implement it""" return None +class MockRunnableTaskIndependent(MockRunnableTask): + def get_run_mode(self) -> GraphRunnableMode: + return GraphRunnableMode.Independent + + def test_graph_runnable_task_cancels_connection_on_system_exit(): safe_set_invocation_context() @@ -81,3 +116,36 @@ def test_graph_runnable_task_doesnt_cancel_connection_on_generic_exception(): # If `did_cancel` is True, that means `_cancel_connections` was called assert task.did_cancel is False + + +def test_graph_runnable_preserves_edges_by_default(): + task = MockRunnableTask( + nodes=[ + MockNode("test", "upstream_node", fqn="model.test.upstream_node"), + MockNode("test", "downstream_node", fqn="model.test.downstream_node"), + ], + edges=[("model.test.upstream_node", "model.test.downstream_node")], + ) + assert task.get_run_mode() == GraphRunnableMode.Topological + graph_queue = task.get_graph_queue() + + assert graph_queue.queued == {"model.test.upstream_node"} + assert graph_queue.inner.queue == [(0, "model.test.upstream_node")] + + +def test_graph_runnable_preserves_edges_false(): + task = MockRunnableTaskIndependent( + nodes=[ + MockNode("test", "upstream_node", fqn="model.test.upstream_node"), + MockNode("test", "downstream_node", fqn="model.test.downstream_node"), + ], + edges=[("model.test.upstream_node", "model.test.downstream_node")], + ) + assert task.get_run_mode() == GraphRunnableMode.Independent + graph_queue = task.get_graph_queue() + + assert graph_queue.queued == {"model.test.downstream_node", "model.test.upstream_node"} + assert graph_queue.inner.queue == [ + (0, "model.test.downstream_node"), + (0, "model.test.upstream_node"), + ] diff --git a/tests/unit/task/test_test.py b/tests/unit/task/test_test.py new file mode 100644 index 00000000000..350f80950eb --- /dev/null +++ b/tests/unit/task/test_test.py @@ -0,0 +1,71 @@ +import agate +import pytest + +from dbt.task.test import list_rows_from_table + + +class 
TestListRowsFromTable: + @pytest.mark.parametrize( + "agate_table_cols,agate_table_rows,expected_list_rows", + [ + (["a", "b", "c"], [], [["a", "b", "c"]]), # no rows + (["a", "b", "c"], [[1, 2, 3]], [["a", "b", "c"], [1, 2, 3]]), # single row, no nulls + ( + ["a", "b", "c"], + [[1, 2, 3], [2, 3, 4]], + [["a", "b", "c"], [1, 2, 3], [2, 3, 4]], + ), # multiple rows + ( + ["a", "b", "c"], + [[None, 2, 3], [2, None, 4]], + [["a", "b", "c"], [None, 2, 3], [2, None, 4]], + ), # multiple rows, with nulls + ], + ) + def test_list_rows_from_table_no_sort( + self, agate_table_cols, agate_table_rows, expected_list_rows + ): + table = agate.Table(rows=agate_table_rows, column_names=agate_table_cols) + + list_rows = list_rows_from_table(table) + assert list_rows == expected_list_rows + + @pytest.mark.parametrize( + "agate_table_cols,agate_table_rows,expected_list_rows", + [ + (["a", "b", "c"], [], [["a", "b", "c"]]), # no rows + (["a", "b", "c"], [[1, 2, 3]], [["a", "b", "c"], [1, 2, 3]]), # single row, no nulls + ( + ["a", "b", "c"], + [[1, 2, 3], [2, 3, 4]], + [["a", "b", "c"], [1, 2, 3], [2, 3, 4]], + ), # multiple rows, in order + ( + ["a", "b", "c"], + [[2, 3, 4], [1, 2, 3]], + [["a", "b", "c"], [1, 2, 3], [2, 3, 4]], + ), # multiple rows, out of order + ( + ["a", "b", "c"], + [[None, 2, 3], [2, 3, 4]], + [["a", "b", "c"], [2, 3, 4], [None, 2, 3]], + ), # multiple rows, out of order with nulls in first position + ( + ["a", "b", "c"], + [[4, 5, 6], [1, None, 3]], + [["a", "b", "c"], [1, None, 3], [4, 5, 6]], + ), # multiple rows, out of order with null in non-first position + ( + ["a", "b", "c"], + [[None, 5, 6], [1, None, 3]], + [["a", "b", "c"], [1, None, 3], [None, 5, 6]], + ), # multiple rows, out of order with nulls in many positions + ], + ) + def test_list_rows_from_table_with_sort( + self, agate_table_cols, agate_table_rows, expected_list_rows + ): + table = agate.Table(rows=agate_table_rows, column_names=agate_table_cols) + + list_rows = 
list_rows_from_table(table, sort=True) + assert list_rows == expected_list_rows diff --git a/tests/unit/test_linker.py b/tests/unit/test_compilation.py similarity index 50% rename from tests/unit/test_linker.py rename to tests/unit/test_compilation.py index d1d09532e12..c18e7fb15d2 100644 --- a/tests/unit/test_linker.py +++ b/tests/unit/test_compilation.py @@ -1,17 +1,14 @@ import os import tempfile -import unittest -from unittest import mock - -from dbt import compilation -from dbt.graph.selector import NodeSelector -from dbt.graph.cli import parse_difference from queue import Empty +from unittest import mock -from dbt.flags import set_from_args -from argparse import Namespace +import pytest -set_from_args(Namespace(WARN_ERROR=False), None) +from dbt.compilation import Graph, Linker +from dbt.graph.cli import parse_difference +from dbt.graph.queue import GraphQueue +from dbt.graph.selector import NodeSelector def _mock_manifest(nodes): @@ -34,158 +31,165 @@ def _mock_manifest(nodes): return manifest -class LinkerTest(unittest.TestCase): - def setUp(self): - self.linker = compilation.Linker() +class TestLinker: + @pytest.fixture + def linker(self) -> Linker: + return Linker() - def test_linker_add_node(self): + def test_linker_add_node(self, linker: Linker) -> None: expected_nodes = ["A", "B", "C"] for node in expected_nodes: - self.linker.add_node(node) + linker.add_node(node) - actual_nodes = self.linker.nodes() + actual_nodes = linker.nodes() for node in expected_nodes: - self.assertIn(node, actual_nodes) + assert node in actual_nodes - self.assertEqual(len(actual_nodes), len(expected_nodes)) + assert len(actual_nodes) == len(expected_nodes) - def test_linker_write_graph(self): + def test_linker_write_graph(self, linker: Linker) -> None: expected_nodes = ["A", "B", "C"] for node in expected_nodes: - self.linker.add_node(node) + linker.add_node(node) manifest = _mock_manifest("ABC") (fd, fname) = tempfile.mkstemp() os.close(fd) try: - 
self.linker.write_graph(fname, manifest) + linker.write_graph(fname, manifest) assert os.path.exists(fname) finally: os.unlink(fname) - def assert_would_join(self, queue): + def assert_would_join(self, queue: GraphQueue) -> None: """test join() without timeout risk""" - self.assertEqual(queue.inner.unfinished_tasks, 0) - - def _get_graph_queue(self, manifest, include=None, exclude=None): - graph = compilation.Graph(self.linker.graph) + assert queue.inner.unfinished_tasks == 0 + + def _get_graph_queue( + self, + manifest, + linker: Linker, + include=None, + exclude=None, + ) -> GraphQueue: + graph = Graph(linker.graph) selector = NodeSelector(graph, manifest) # TODO: The "eager" string below needs to be replaced with programatic access # to the default value for the indirect selection parameter in # dbt.cli.params.indirect_selection # # Doing that is actually a little tricky, so I'm punting it to a new ticket GH #6397 - spec = parse_difference(include, exclude, "eager") + spec = parse_difference(include, exclude) return selector.get_graph_queue(spec) - def test_linker_add_dependency(self): + def test_linker_add_dependency(self, linker: Linker) -> None: actual_deps = [("A", "B"), ("A", "C"), ("B", "C")] for (l, r) in actual_deps: - self.linker.dependency(l, r) + linker.dependency(l, r) - queue = self._get_graph_queue(_mock_manifest("ABC")) + queue = self._get_graph_queue(_mock_manifest("ABC"), linker) got = queue.get(block=False) - self.assertEqual(got.unique_id, "C") - with self.assertRaises(Empty): + assert got.unique_id == "C" + with pytest.raises(Empty): queue.get(block=False) - self.assertFalse(queue.empty()) + assert not queue.empty() queue.mark_done("C") - self.assertFalse(queue.empty()) + assert not queue.empty() got = queue.get(block=False) - self.assertEqual(got.unique_id, "B") - with self.assertRaises(Empty): + assert got.unique_id == "B" + with pytest.raises(Empty): queue.get(block=False) - self.assertFalse(queue.empty()) + assert not queue.empty() 
queue.mark_done("B") - self.assertFalse(queue.empty()) + assert not queue.empty() got = queue.get(block=False) - self.assertEqual(got.unique_id, "A") - with self.assertRaises(Empty): + assert got.unique_id == "A" + with pytest.raises(Empty): queue.get(block=False) - self.assertTrue(queue.empty()) + assert queue.empty() queue.mark_done("A") self.assert_would_join(queue) - self.assertTrue(queue.empty()) + assert queue.empty() - def test_linker_add_disjoint_dependencies(self): + def test_linker_add_disjoint_dependencies(self, linker: Linker) -> None: actual_deps = [("A", "B")] additional_node = "Z" for (l, r) in actual_deps: - self.linker.dependency(l, r) - self.linker.add_node(additional_node) + linker.dependency(l, r) + linker.add_node(additional_node) - queue = self._get_graph_queue(_mock_manifest("ABCZ")) + queue = self._get_graph_queue(_mock_manifest("ABCZ"), linker) # the first one we get must be B, it has the longest dep chain first = queue.get(block=False) - self.assertEqual(first.unique_id, "B") - self.assertFalse(queue.empty()) + assert first.unique_id == "B" + assert not queue.empty() queue.mark_done("B") - self.assertFalse(queue.empty()) + assert not queue.empty() second = queue.get(block=False) - self.assertIn(second.unique_id, {"A", "Z"}) - self.assertFalse(queue.empty()) + assert second.unique_id in {"A", "Z"} + assert not queue.empty() queue.mark_done(second.unique_id) - self.assertFalse(queue.empty()) + assert not queue.empty() third = queue.get(block=False) - self.assertIn(third.unique_id, {"A", "Z"}) - with self.assertRaises(Empty): + assert third.unique_id in {"A", "Z"} + with pytest.raises(Empty): queue.get(block=False) - self.assertNotEqual(second.unique_id, third.unique_id) - self.assertTrue(queue.empty()) + assert second.unique_id != third.unique_id + assert queue.empty() queue.mark_done(third.unique_id) self.assert_would_join(queue) - self.assertTrue(queue.empty()) + assert queue.empty() - def 
test_linker_dependencies_limited_to_some_nodes(self): + def test_linker_dependencies_limited_to_some_nodes(self, linker: Linker) -> None: actual_deps = [("A", "B"), ("B", "C"), ("C", "D")] for (l, r) in actual_deps: - self.linker.dependency(l, r) + linker.dependency(l, r) - queue = self._get_graph_queue(_mock_manifest("ABCD"), ["B"]) + queue = self._get_graph_queue(_mock_manifest("ABCD"), linker, ["B"]) got = queue.get(block=False) - self.assertEqual(got.unique_id, "B") - self.assertTrue(queue.empty()) + assert got.unique_id == "B" + assert queue.empty() queue.mark_done("B") self.assert_would_join(queue) - queue_2 = queue = self._get_graph_queue(_mock_manifest("ABCD"), ["A", "B"]) + queue_2 = queue = self._get_graph_queue(_mock_manifest("ABCD"), linker, ["A", "B"]) got = queue_2.get(block=False) - self.assertEqual(got.unique_id, "B") - self.assertFalse(queue_2.empty()) - with self.assertRaises(Empty): + assert got.unique_id == "B" + assert not queue_2.empty() + with pytest.raises(Empty): queue_2.get(block=False) queue_2.mark_done("B") - self.assertFalse(queue_2.empty()) + assert not queue_2.empty() got = queue_2.get(block=False) - self.assertEqual(got.unique_id, "A") - self.assertTrue(queue_2.empty()) - with self.assertRaises(Empty): + assert got.unique_id == "A" + assert queue_2.empty() + with pytest.raises(Empty): queue_2.get(block=False) - self.assertTrue(queue_2.empty()) + assert queue_2.empty() queue_2.mark_done("A") self.assert_would_join(queue_2) - def test__find_cycles__cycles(self): + def test__find_cycles__cycles(self, linker: Linker) -> None: actual_deps = [("A", "B"), ("B", "C"), ("C", "A")] for (l, r) in actual_deps: - self.linker.dependency(l, r) + linker.dependency(l, r) - self.assertIsNotNone(self.linker.find_cycles()) + assert linker.find_cycles() is not None - def test__find_cycles__no_cycles(self): + def test__find_cycles__no_cycles(self, linker: Linker) -> None: actual_deps = [("A", "B"), ("B", "C"), ("C", "D")] for (l, r) in actual_deps: - 
self.linker.dependency(l, r) + linker.dependency(l, r) - self.assertIsNone(self.linker.find_cycles()) + assert linker.find_cycles() is None diff --git a/tests/unit/test_contracts_graph_unparsed.py b/tests/unit/test_contracts_graph_unparsed.py deleted file mode 100644 index 16cc5c7c4b7..00000000000 --- a/tests/unit/test_contracts_graph_unparsed.py +++ /dev/null @@ -1,984 +0,0 @@ -from datetime import timedelta -import pickle -import pytest - -from dbt.contracts.graph.unparsed import ( - UnparsedNode, - UnparsedRunHook, - UnparsedMacro, - UnparsedSourceDefinition, - UnparsedSourceTableDefinition, - UnparsedDocumentationFile, - UnparsedColumn, - UnparsedNodeUpdate, - UnparsedModelUpdate, - Docs, - UnparsedExposure, - UnparsedMetric, - UnparsedMetricTypeParams, - UnparsedMetricInputMeasure, - UnparsedVersion, -) -from dbt.artifacts.resources import ( - ExposureType, - FreshnessThreshold, - MaturityType, - Owner, - Quoting, - Time, -) -from dbt.artifacts.resources.types import TimePeriod -from dbt.artifacts.schemas.results import FreshnessStatus -from dbt.node_types import NodeType -from .utils import ContractTestCase - - -class TestUnparsedMacro(ContractTestCase): - ContractType = UnparsedMacro - - def test_ok(self): - macro_dict = { - "path": "/root/path.sql", - "original_file_path": "/root/path.sql", - "package_name": "test", - "language": "sql", - "raw_code": "{% macro foo() %}select 1 as id{% endmacro %}", - "resource_type": "macro", - } - macro = self.ContractType( - path="/root/path.sql", - original_file_path="/root/path.sql", - package_name="test", - language="sql", - raw_code="{% macro foo() %}select 1 as id{% endmacro %}", - resource_type=NodeType.Macro, - ) - self.assert_symmetric(macro, macro_dict) - pickle.loads(pickle.dumps(macro)) - - def test_invalid_missing_field(self): - macro_dict = { - "path": "/root/path.sql", - "original_file_path": "/root/path.sql", - # 'package_name': 'test', - "language": "sql", - "raw_code": "{% macro foo() %}select 1 as id{% 
endmacro %}", - "resource_type": "macro", - } - self.assert_fails_validation(macro_dict) - - def test_invalid_extra_field(self): - macro_dict = { - "path": "/root/path.sql", - "original_file_path": "/root/path.sql", - "package_name": "test", - "language": "sql", - "raw_code": "{% macro foo() %}select 1 as id{% endmacro %}", - "extra": "extra", - "resource_type": "macro", - } - self.assert_fails_validation(macro_dict) - - -class TestUnparsedNode(ContractTestCase): - ContractType = UnparsedNode - - def test_ok(self): - node_dict = { - "name": "foo", - "resource_type": NodeType.Model, - "path": "/root/x/path.sql", - "original_file_path": "/root/path.sql", - "package_name": "test", - "language": "sql", - "raw_code": 'select * from {{ ref("thing") }}', - } - node = self.ContractType( - package_name="test", - path="/root/x/path.sql", - original_file_path="/root/path.sql", - language="sql", - raw_code='select * from {{ ref("thing") }}', - name="foo", - resource_type=NodeType.Model, - ) - self.assert_symmetric(node, node_dict) - self.assertFalse(node.empty) - - self.assert_fails_validation(node_dict, cls=UnparsedRunHook) - self.assert_fails_validation(node_dict, cls=UnparsedMacro) - pickle.loads(pickle.dumps(node)) - - def test_empty(self): - node_dict = { - "name": "foo", - "resource_type": NodeType.Model, - "path": "/root/x/path.sql", - "original_file_path": "/root/path.sql", - "package_name": "test", - "language": "sql", - "raw_code": " \n", - } - node = UnparsedNode( - package_name="test", - path="/root/x/path.sql", - original_file_path="/root/path.sql", - language="sql", - raw_code=" \n", - name="foo", - resource_type=NodeType.Model, - ) - self.assert_symmetric(node, node_dict) - self.assertTrue(node.empty) - - self.assert_fails_validation(node_dict, cls=UnparsedRunHook) - self.assert_fails_validation(node_dict, cls=UnparsedMacro) - - -class TestUnparsedRunHook(ContractTestCase): - ContractType = UnparsedRunHook - - def test_ok(self): - node_dict = { - "name": "foo", 
- "resource_type": NodeType.Operation, - "path": "/root/dbt_project.yml", - "original_file_path": "/root/dbt_project.yml", - "package_name": "test", - "language": "sql", - "raw_code": "GRANT select on dbt_postgres", - "index": 4, - } - node = self.ContractType( - package_name="test", - path="/root/dbt_project.yml", - original_file_path="/root/dbt_project.yml", - language="sql", - raw_code="GRANT select on dbt_postgres", - name="foo", - resource_type=NodeType.Operation, - index=4, - ) - self.assert_symmetric(node, node_dict) - self.assert_fails_validation(node_dict, cls=UnparsedNode) - pickle.loads(pickle.dumps(node)) - - def test_bad_type(self): - node_dict = { - "name": "foo", - "resource_type": NodeType.Model, # invalid - "path": "/root/dbt_project.yml", - "original_file_path": "/root/dbt_project.yml", - "package_name": "test", - "language": "sql", - "raw_code": "GRANT select on dbt_postgres", - "index": 4, - } - self.assert_fails_validation(node_dict) - - -class TestFreshnessThreshold(ContractTestCase): - ContractType = FreshnessThreshold - - def test_empty(self): - empty = self.ContractType() - self.assert_symmetric(empty, {"error_after": {}, "warn_after": {}}) - self.assertEqual(empty.status(float("Inf")), FreshnessStatus.Pass) - self.assertEqual(empty.status(0), FreshnessStatus.Pass) - - def test_both(self): - threshold = self.ContractType( - warn_after=Time(count=18, period=TimePeriod.hour), - error_after=Time(count=2, period=TimePeriod.day), - ) - dct = { - "error_after": {"count": 2, "period": "day"}, - "warn_after": {"count": 18, "period": "hour"}, - } - self.assert_symmetric(threshold, dct) - - error_seconds = timedelta(days=3).total_seconds() - warn_seconds = timedelta(days=1).total_seconds() - pass_seconds = timedelta(hours=3).total_seconds() - self.assertEqual(threshold.status(error_seconds), FreshnessStatus.Error) - self.assertEqual(threshold.status(warn_seconds), FreshnessStatus.Warn) - self.assertEqual(threshold.status(pass_seconds), 
FreshnessStatus.Pass) - pickle.loads(pickle.dumps(threshold)) - - def test_merged(self): - t1 = self.ContractType( - warn_after=Time(count=36, period=TimePeriod.hour), - error_after=Time(count=2, period=TimePeriod.day), - ) - t2 = self.ContractType( - warn_after=Time(count=18, period=TimePeriod.hour), - ) - threshold = self.ContractType( - warn_after=Time(count=18, period=TimePeriod.hour), - error_after=Time(count=None, period=None), - ) - self.assertEqual(threshold, t1.merged(t2)) - - warn_seconds = timedelta(days=1).total_seconds() - pass_seconds = timedelta(hours=3).total_seconds() - self.assertEqual(threshold.status(warn_seconds), FreshnessStatus.Warn) - self.assertEqual(threshold.status(pass_seconds), FreshnessStatus.Pass) - - -class TestQuoting(ContractTestCase): - ContractType = Quoting - - def test_empty(self): - empty = self.ContractType() - self.assert_symmetric(empty, {}) - - def test_partial(self): - a = self.ContractType(None, True, False) - b = self.ContractType(True, False, None) - self.assert_symmetric(a, {"schema": True, "identifier": False}) - self.assert_symmetric(b, {"database": True, "schema": False}) - - c = a.merged(b) - self.assertEqual(c, self.ContractType(True, False, False)) - self.assert_symmetric(c, {"database": True, "schema": False, "identifier": False}) - pickle.loads(pickle.dumps(c)) - - -class TestUnparsedSourceDefinition(ContractTestCase): - ContractType = UnparsedSourceDefinition - - def test_defaults(self): - minimum = self.ContractType(name="foo") - from_dict = {"name": "foo"} - to_dict = { - "name": "foo", - "description": "", - "freshness": {"error_after": {}, "warn_after": {}}, - "quoting": {}, - "tables": [], - "loader": "", - "meta": {}, - "tags": [], - "config": {}, - } - self.assert_from_dict(minimum, from_dict) - self.assert_to_dict(minimum, to_dict) - - def test_contents(self): - empty = self.ContractType( - name="foo", - description="a description", - quoting=Quoting(database=False), - loader="some_loader", - 
freshness=FreshnessThreshold(), - tables=[], - meta={}, - ) - dct = { - "name": "foo", - "description": "a description", - "quoting": {"database": False}, - "loader": "some_loader", - "freshness": {"error_after": {}, "warn_after": {}}, - "tables": [], - "meta": {}, - "tags": [], - "config": {}, - } - self.assert_symmetric(empty, dct) - - def test_table_defaults(self): - table_1 = UnparsedSourceTableDefinition(name="table1") - table_2 = UnparsedSourceTableDefinition( - name="table2", - description="table 2", - quoting=Quoting(database=True), - ) - source = self.ContractType(name="foo", tables=[table_1, table_2]) - from_dict = { - "name": "foo", - "tables": [ - {"name": "table1"}, - { - "name": "table2", - "description": "table 2", - "quoting": {"database": True}, - }, - ], - } - to_dict = { - "name": "foo", - "description": "", - "config": {}, - "loader": "", - "freshness": {"error_after": {}, "warn_after": {}}, - "quoting": {}, - "meta": {}, - "tables": [ - { - "name": "table1", - "description": "", - "config": {}, - "docs": {"show": True}, - "data_tests": [], - "tests": [], - "columns": [], - "constraints": [], - "quoting": {}, - "freshness": {"error_after": {}, "warn_after": {}}, - "meta": {}, - "tags": [], - }, - { - "name": "table2", - "description": "table 2", - "config": {}, - "docs": {"show": True}, - "data_tests": [], - "tests": [], - "columns": [], - "constraints": [], - "quoting": {"database": True}, - "freshness": {"error_after": {}, "warn_after": {}}, - "meta": {}, - "tags": [], - }, - ], - "tags": [], - } - self.assert_from_dict(source, from_dict) - self.assert_symmetric(source, to_dict) - pickle.loads(pickle.dumps(source)) - - -class TestUnparsedDocumentationFile(ContractTestCase): - ContractType = UnparsedDocumentationFile - - def test_ok(self): - doc = self.ContractType( - package_name="test", - path="/root/docs", - original_file_path="/root/docs/doc.md", - file_contents="blah blah blah", - ) - doc_dict = { - "package_name": "test", - "path": 
"/root/docs", - "original_file_path": "/root/docs/doc.md", - "file_contents": "blah blah blah", - } - self.assert_symmetric(doc, doc_dict) - self.assertEqual(doc.resource_type, NodeType.Documentation) - self.assert_fails_validation(doc_dict, UnparsedNode) - pickle.loads(pickle.dumps(doc)) - - def test_extra_field(self): - self.assert_fails_validation({}) - doc_dict = { - "package_name": "test", - "path": "/root/docs", - "original_file_path": "/root/docs/doc.md", - "file_contents": "blah blah blah", - "resource_type": "docs", - } - self.assert_fails_validation(doc_dict) - - -class TestUnparsedNodeUpdate(ContractTestCase): - ContractType = UnparsedNodeUpdate - - def test_defaults(self): - minimum = self.ContractType( - name="foo", - yaml_key="models", - original_file_path="/some/fake/path", - package_name="test", - ) - from_dict = { - "name": "foo", - "yaml_key": "models", - "original_file_path": "/some/fake/path", - "package_name": "test", - } - to_dict = { - "name": "foo", - "yaml_key": "models", - "original_file_path": "/some/fake/path", - "package_name": "test", - "columns": [], - "description": "", - "docs": {"show": True}, - "data_tests": [], - "tests": [], - "meta": {}, - "config": {}, - "constraints": [], - } - self.assert_from_dict(minimum, from_dict) - self.assert_to_dict(minimum, to_dict) - - def test_contents(self): - update = self.ContractType( - name="foo", - yaml_key="models", - original_file_path="/some/fake/path", - package_name="test", - description="a description", - data_tests=["table_test"], - meta={"key": ["value1", "value2"]}, - columns=[ - UnparsedColumn( - name="x", - description="x description", - meta={"key2": "value3"}, - ), - UnparsedColumn( - name="y", - description="y description", - data_tests=["unique", {"accepted_values": {"values": ["blue", "green"]}}], - meta={}, - tags=["a", "b"], - ), - ], - docs=Docs(show=False), - ) - dct = { - "name": "foo", - "yaml_key": "models", - "original_file_path": "/some/fake/path", - "package_name": 
"test", - "description": "a description", - "data_tests": ["table_test"], - "tests": [], - "meta": {"key": ["value1", "value2"]}, - "constraints": [], - "columns": [ - { - "name": "x", - "description": "x description", - "docs": {"show": True}, - "data_tests": [], - "tests": [], - "meta": {"key2": "value3"}, - "tags": [], - "constraints": [], - }, - { - "name": "y", - "description": "y description", - "docs": {"show": True}, - "data_tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], - "tests": [], - "meta": {}, - "tags": ["a", "b"], - "constraints": [], - }, - ], - "docs": {"show": False}, - "config": {}, - } - self.assert_symmetric(update, dct) - pickle.loads(pickle.dumps(update)) - - def test_bad_test_type(self): - dct = { - "name": "foo", - "yaml_key": "models", - "original_file_path": "/some/fake/path", - "package_name": "test", - "description": "a description", - "data_tests": ["table_test"], - "tests": [], - "meta": {"key": ["value1", "value2"]}, - "columns": [ - { - "name": "x", - "description": "x description", - "docs": {"show": True}, - "data_tests": [], - "tests": [], - "meta": {"key2": "value3"}, - }, - { - "name": "y", - "description": "y description", - "docs": {"show": True}, - "data_tests": [100, {"accepted_values": {"values": ["blue", "green"]}}], - "tests": [], - "meta": {}, - "yaml_key": "models", - "original_file_path": "/some/fake/path", - }, - ], - "docs": {"show": True}, - } - self.assert_fails_validation(dct) - - dct = { - "name": "foo", - "yaml_key": "models", - "original_file_path": "/some/fake/path", - "package_name": "test", - "description": "a description", - "data_tests": ["table_test"], - "tests": [], - "meta": {"key": ["value1", "value2"]}, - "columns": [ - # column missing a name - { - "description": "x description", - "docs": {"show": True}, - "data_tests": [], - "tests": [], - "meta": {"key2": "value3"}, - }, - { - "name": "y", - "description": "y description", - "docs": {"show": True}, - "data_tests": 
["unique", {"accepted_values": {"values": ["blue", "green"]}}], - "tests": [], - "meta": {}, - "yaml_key": "models", - "original_file_path": "/some/fake/path", - }, - ], - "docs": {"show": True}, - } - self.assert_fails_validation(dct) - - # missing a name - dct = { - "yaml_key": "models", - "original_file_path": "/some/fake/path", - "package_name": "test", - "description": "a description", - "data_tests": ["table_test"], - "tests": [], - "meta": {"key": ["value1", "value2"]}, - "columns": [ - { - "name": "x", - "description": "x description", - "docs": {"show": True}, - "data_tests": [], - "tests": [], - "meta": {"key2": "value3"}, - }, - { - "name": "y", - "description": "y description", - "docs": {"show": True}, - "data_tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], - "tests": [], - "meta": {}, - "yaml_key": "models", - "original_file_path": "/some/fake/path", - }, - ], - "docs": {"show": True}, - } - self.assert_fails_validation(dct) - - -class TestUnparsedModelUpdate(ContractTestCase): - ContractType = UnparsedModelUpdate - - def test_defaults(self): - minimum = self.ContractType( - name="foo", - yaml_key="models", - original_file_path="/some/fake/path", - package_name="test", - ) - from_dict = { - "name": "foo", - "yaml_key": "models", - "original_file_path": "/some/fake/path", - "package_name": "test", - } - to_dict = { - "name": "foo", - "yaml_key": "models", - "original_file_path": "/some/fake/path", - "package_name": "test", - "columns": [], - "description": "", - "docs": {"show": True}, - "data_tests": [], - "tests": [], - "meta": {}, - "config": {}, - "constraints": [], - "versions": [], - } - self.assert_from_dict(minimum, from_dict) - self.assert_to_dict(minimum, to_dict) - - def test_contents(self): - update = self.ContractType( - name="foo", - yaml_key="models", - original_file_path="/some/fake/path", - package_name="test", - description="a description", - data_tests=["table_test"], - meta={"key": ["value1", "value2"]}, - 
columns=[ - UnparsedColumn( - name="x", - description="x description", - meta={"key2": "value3"}, - ), - UnparsedColumn( - name="y", - description="y description", - data_tests=["unique", {"accepted_values": {"values": ["blue", "green"]}}], - meta={}, - tags=["a", "b"], - ), - ], - docs=Docs(show=False), - versions=[UnparsedVersion(v=2)], - ) - dct = { - "name": "foo", - "yaml_key": "models", - "original_file_path": "/some/fake/path", - "package_name": "test", - "description": "a description", - "data_tests": ["table_test"], - "tests": [], - "meta": {"key": ["value1", "value2"]}, - "constraints": [], - "versions": [ - { - "v": 2, - "description": "", - "columns": [], - "config": {}, - "constraints": [], - "docs": {"show": True}, - } - ], - "columns": [ - { - "name": "x", - "description": "x description", - "docs": {"show": True}, - "data_tests": [], - "tests": [], - "meta": {"key2": "value3"}, - "tags": [], - "constraints": [], - }, - { - "name": "y", - "description": "y description", - "docs": {"show": True}, - "data_tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], - "tests": [], - "meta": {}, - "tags": ["a", "b"], - "constraints": [], - }, - ], - "docs": {"show": False}, - "config": {}, - } - self.assert_symmetric(update, dct) - pickle.loads(pickle.dumps(update)) - - def test_bad_test_type(self): - dct = { - "name": "foo", - "yaml_key": "models", - "original_file_path": "/some/fake/path", - "package_name": "test", - "description": "a description", - "data_tests": ["table_test"], - "tests": [], - "meta": {"key": ["value1", "value2"]}, - "columns": [ - { - "name": "x", - "description": "x description", - "docs": {"show": True}, - "data_tests": [], - "tests": [], - "meta": {"key2": "value3"}, - }, - { - "name": "y", - "description": "y description", - "docs": {"show": True}, - "data_tests": [100, {"accepted_values": {"values": ["blue", "green"]}}], - "tests": [], - "meta": {}, - "yaml_key": "models", - "original_file_path": "/some/fake/path", 
- }, - ], - "docs": {"show": True}, - } - self.assert_fails_validation(dct) - - dct = { - "name": "foo", - "yaml_key": "models", - "original_file_path": "/some/fake/path", - "package_name": "test", - "description": "a description", - "data_tests": ["table_test"], - "tests": [], - "meta": {"key": ["value1", "value2"]}, - "columns": [ - # column missing a name - { - "description": "x description", - "docs": {"show": True}, - "data_tests": [], - "tests": [], - "meta": {"key2": "value3"}, - }, - { - "name": "y", - "description": "y description", - "docs": {"show": True}, - "data_tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], - "tests": [], - "meta": {}, - "yaml_key": "models", - "original_file_path": "/some/fake/path", - }, - ], - "docs": {"show": True}, - } - self.assert_fails_validation(dct) - - # missing a name - dct = { - "yaml_key": "models", - "original_file_path": "/some/fake/path", - "package_name": "test", - "description": "a description", - "data_tests": ["table_test"], - "tests": [], - "meta": {"key": ["value1", "value2"]}, - "columns": [ - { - "name": "x", - "description": "x description", - "docs": {"show": True}, - "data_tests": [], - "tests": [], - "meta": {"key2": "value3"}, - }, - { - "name": "y", - "description": "y description", - "docs": {"show": True}, - "data_tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], - "tests": [], - "meta": {}, - "yaml_key": "models", - "original_file_path": "/some/fake/path", - }, - ], - "docs": {"show": True}, - } - self.assert_fails_validation(dct) - - -class TestUnparsedExposure(ContractTestCase): - ContractType = UnparsedExposure - - def get_ok_dict(self): - return { - "name": "my_exposure", - "type": "dashboard", - "owner": {"name": "example", "email": "name@example.com", "slack": "#channel"}, - "maturity": "medium", - "meta": {"tool": "my_tool"}, - "tags": ["my_department"], - "url": "https://example.com/dashboards/1", - "description": "A exposure", - "config": {}, - 
"depends_on": [ - 'ref("my_model")', - 'source("raw", "source_table")', - ], - } - - def test_ok(self): - exposure = self.ContractType( - name="my_exposure", - type=ExposureType.Dashboard, - owner=Owner(name="example", email="name@example.com", _extra={"slack": "#channel"}), - maturity=MaturityType.Medium, - url="https://example.com/dashboards/1", - description="A exposure", - config={}, - meta={"tool": "my_tool"}, - tags=["my_department"], - depends_on=['ref("my_model")', 'source("raw", "source_table")'], - ) - dct = self.get_ok_dict() - self.assert_symmetric(exposure, dct) - pickle.loads(pickle.dumps(exposure)) - - def test_ok_exposures(self): - for exposure_allowed in ("dashboard", "notebook", "analysis", "ml", "application"): - tst = self.get_ok_dict() - tst["type"] = exposure_allowed - assert self.ContractType.from_dict(tst).type == exposure_allowed - - def test_bad_exposure(self): - # bad exposure: None isn't allowed - for exposure_not_allowed in (None, "not an exposure"): - tst = self.get_ok_dict() - tst["type"] = exposure_not_allowed - self.assert_fails_validation(tst) - - def test_no_exposure(self): - tst = self.get_ok_dict() - del tst["type"] - self.assert_fails_validation(tst) - - def test_ok_maturities(self): - for maturity_allowed in (None, "low", "medium", "high"): - tst = self.get_ok_dict() - tst["maturity"] = maturity_allowed - assert self.ContractType.from_dict(tst).maturity == maturity_allowed - - tst = self.get_ok_dict() - del tst["maturity"] - assert self.ContractType.from_dict(tst).maturity is None - - def test_bad_maturity(self): - tst = self.get_ok_dict() - tst["maturity"] = "invalid maturity" - self.assert_fails_validation(tst) - - def test_bad_owner_missing_things(self): - tst = self.get_ok_dict() - del tst["owner"]["email"] - del tst["owner"]["name"] - self.assert_fails_validation(tst) - - del tst["owner"] - self.assert_fails_validation(tst) - - def test_bad_tags(self): - tst = self.get_ok_dict() - tst["tags"] = [123] - 
self.assert_fails_validation(tst) - - -class TestUnparsedMetric(ContractTestCase): - ContractType = UnparsedMetric - - def get_ok_dict(self): - return { - "name": "new_customers", - "label": "New Customers", - "description": "New customers", - "type": "simple", - "type_params": { - "measure": { - "name": "customers", - "filter": "is_new = true", - "join_to_timespine": False, - }, - }, - "config": {}, - "tags": [], - "meta": {"is_okr": True}, - } - - def test_ok(self): - metric = self.ContractType( - name="new_customers", - label="New Customers", - description="New customers", - type="simple", - type_params=UnparsedMetricTypeParams( - measure=UnparsedMetricInputMeasure( - name="customers", - filter="is_new = true", - ) - ), - config={}, - meta={"is_okr": True}, - ) - dct = self.get_ok_dict() - self.assert_symmetric(metric, dct) - pickle.loads(pickle.dumps(metric)) - - def test_bad_metric_no_type_params(self): - tst = self.get_ok_dict() - del tst["type_params"] - self.assert_fails_validation(tst) - - def test_bad_tags(self): - tst = self.get_ok_dict() - tst["tags"] = [123] - self.assert_fails_validation(tst) - - -class TestUnparsedVersion(ContractTestCase): - ContractType = UnparsedVersion - - def get_ok_dict(self): - return { - "v": 2, - "defined_in": "test_defined_in", - "description": "A version", - "config": {}, - "constraints": [], - "docs": {"show": False}, - "data_tests": [], - "columns": [], - } - - def test_ok(self): - version = self.ContractType( - v=2, - defined_in="test_defined_in", - description="A version", - config={}, - constraints=[], - docs=Docs(show=False), - data_tests=[], - columns=[], - ) - dct = self.get_ok_dict() - self.assert_symmetric(version, dct) - pickle.loads(pickle.dumps(version)) - - def test_bad_version_no_v(self): - version = self.get_ok_dict() - del version["v"] - self.assert_fails_validation(version) - - -@pytest.mark.parametrize( - "left,right,expected_lt", - [ - # same types - (2, 12, True), - (12, 2, False), - ("a", "b", True), 
- ("b", "a", False), - # mismatched types - numeric - (2, 12.0, True), - (12.0, 2, False), - (2, "12", True), - ("12", 2, False), - # mismatched types - (1, "test", True), - ("test", 1, False), - ], -) -def test_unparsed_version_lt(left, right, expected_lt): - assert (UnparsedVersion(left) < UnparsedVersion(right)) == expected_lt diff --git a/tests/unit/test_events.py b/tests/unit/test_events.py index 431c6d49ba8..8a19b0ad39f 100644 --- a/tests/unit/test_events.py +++ b/tests/unit/test_events.py @@ -1,17 +1,13 @@ import logging import re -from argparse import Namespace from typing import TypeVar import pytest from dbt.adapters.events import types as adapter_types -from dbt_common.events.event_manager_client import ctx_set_event_manager -from dbt.artifacts.schemas.results import TimingInfo, RunStatus -from dbt.artifacts.schemas.run import RunResult -from dbt_common.events import types from dbt.adapters.events.logging import AdapterLogger -from dbt_common.events.base_types import msg_from_base_event +from dbt.artifacts.schemas.results import RunStatus, TimingInfo +from dbt.artifacts.schemas.run import RunResult from dbt.events import types as core_types from dbt.events.base_types import ( CoreBaseEvent, @@ -22,14 +18,14 @@ TestLevel, WarnLevel, ) -from dbt_common.events.event_manager import TestEventManager, EventManager -from dbt_common.events.functions import msg_to_dict, msg_to_json -from dbt_common.events.helpers import get_json_string_utcnow from dbt.events.types import RunResultError -from dbt.flags import set_from_args from dbt.task.printer import print_run_result_error - -set_from_args(Namespace(WARN_ERROR=False), None) +from dbt_common.events import types +from dbt_common.events.base_types import msg_from_base_event +from dbt_common.events.event_manager import EventManager, TestEventManager +from dbt_common.events.event_manager_client import ctx_set_event_manager +from dbt_common.events.functions import msg_to_dict, msg_to_json +from 
dbt_common.events.helpers import get_json_string_utcnow # takes in a class and finds any subclasses for it @@ -124,7 +120,6 @@ def test_event_codes(self): core_types.MainReportVersion(version=""), core_types.MainReportArgs(args={}), core_types.MainTrackingUserState(user_state=""), - core_types.MergedFromState(num_merged=0, sample=[]), core_types.MissingProfileTarget(profile_name="", target_name=""), core_types.InvalidOptionYAML(option_name="vars"), core_types.LogDbtProjectError(), @@ -154,10 +149,14 @@ def test_event_codes(self): adapter_types.CollectFreshnessReturnSignature(), core_types.TestsConfigDeprecation(deprecated_path="", exp_path=""), core_types.ProjectFlagsMovedDeprecation(), - core_types.SpacesInModelNameDeprecation(model_name="", model_version="", level=""), - core_types.TotalModelNamesWithSpacesDeprecation( + core_types.SpacesInResourceNameDeprecation(unique_id="", level=""), + core_types.ResourceNamesWithSpacesDeprecation( count_invalid_names=1, show_debug_hint=True, level="" ), + core_types.PackageMaterializationOverrideDeprecation( + package_name="my_package", materialization_name="view" + ), + core_types.SourceFreshnessProjectHooksNotRun(), # E - DB Adapter ====================== adapter_types.AdapterEventDebug(), adapter_types.AdapterEventInfo(), diff --git a/tests/unit/test_functions.py b/tests/unit/test_functions.py index 57fc78b9e25..7d118dd7033 100644 --- a/tests/unit/test_functions.py +++ b/tests/unit/test_functions.py @@ -1,14 +1,13 @@ from argparse import Namespace + import pytest import dbt.flags as flags +from dbt.adapters.events.types import AdapterDeprecationWarning +from dbt.events.types import NoNodesForSelectionCriteria from dbt_common.events.functions import msg_to_dict, warn_or_error -from dbt.events.logging import setup_event_logger -from dbt_common.events.types import InfoLevel +from dbt_common.events.types import InfoLevel, RetryExternalCall from dbt_common.exceptions import EventCompilationError -from dbt.events.types import 
NoNodesForSelectionCriteria -from dbt.adapters.events.types import AdapterDeprecationWarning -from dbt_common.events.types import RetryExternalCall @pytest.mark.parametrize( @@ -82,13 +81,3 @@ def __init__(self): assert ( False ), f"We expect `msg_to_dict` to gracefully handle exceptions, but it raised {exc}" - - -def test_setup_event_logger_specify_max_bytes(mocker): - patched_file_handler = mocker.patch("dbt_common.events.logger.RotatingFileHandler") - args = Namespace(log_file_max_bytes=1234567) - flags.set_from_args(args, {}) - setup_event_logger(flags.get_flags()) - patched_file_handler.assert_called_once_with( - filename="logs/dbt.log", encoding="utf8", maxBytes=1234567, backupCount=5 - ) diff --git a/tests/unit/test_graph_selection.py b/tests/unit/test_graph_selection.py index 533e6f96ed0..5d5cbf7469d 100644 --- a/tests/unit/test_graph_selection.py +++ b/tests/unit/test_graph_selection.py @@ -1,22 +1,14 @@ +import string from unittest import mock +import networkx as nx import pytest -import string -import dbt_common.exceptions -import dbt.graph.selector as graph_selector import dbt.graph.cli as graph_cli +import dbt.graph.selector as graph_selector +import dbt_common.exceptions from dbt.node_types import NodeType -import networkx as nx - -from dbt import flags - -from argparse import Namespace -from dbt.contracts.project import ProjectFlags - -flags.set_from_args(Namespace(), ProjectFlags()) - def _get_graph(): integer_graph = nx.balanced_tree(2, 2, nx.DiGraph()) @@ -58,7 +50,7 @@ def _get_manifest(graph): @pytest.fixture def graph(): - return graph_selector.Graph(_get_graph()) + return _get_graph() @pytest.fixture @@ -122,16 +114,9 @@ def id_macro(arg): @pytest.mark.parametrize("include,exclude,expected", run_specs, ids=id_macro) -def test_run_specs(include, exclude, expected): - graph = _get_graph() - manifest = _get_manifest(graph) +def test_run_specs(include, exclude, expected, graph, manifest): selector = graph_selector.NodeSelector(graph, manifest) - # 
TODO: The "eager" string below needs to be replaced with programatic access - # to the default value for the indirect selection parameter in - # dbt.cli.params.indirect_selection - # - # Doing that is actually a little tricky, so I'm punting it to a new ticket GH #6397 - spec = graph_cli.parse_difference(include, exclude, "eager") + spec = graph_cli.parse_difference(include, exclude) selected, _ = selector.select_nodes(spec) assert selected == expected diff --git a/tests/unit/test_inject_ctes.py b/tests/unit/test_inject_ctes.py deleted file mode 100644 index 8b459f92e7c..00000000000 --- a/tests/unit/test_inject_ctes.py +++ /dev/null @@ -1,197 +0,0 @@ -from dbt.compilation import inject_ctes_into_sql -from dbt.contracts.graph.nodes import InjectedCTE -import re - - -def norm_whitespace(string): - _RE_COMBINE_WHITESPACE = re.compile(r"\s+") - string = _RE_COMBINE_WHITESPACE.sub(" ", string).strip() - return string - - -def test_inject_ctes_simple1(): - starting_sql = "select * from __dbt__cte__base" - ctes = [ - InjectedCTE( - id="model.test.base", - sql=" __dbt__cte__base as (\n\n\nselect * from test16873767336887004702_test_ephemeral.seed\n)", - ) - ] - expected_sql = """with __dbt__cte__base as ( - select * from test16873767336887004702_test_ephemeral.seed - ) select * from __dbt__cte__base""" - - generated_sql = inject_ctes_into_sql(starting_sql, ctes) - assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) - - -def test_inject_ctes_simple2(): - starting_sql = "select * from __dbt__cte__ephemeral_level_two" - ctes = [ - InjectedCTE( - id="model.test.ephemeral_level_two", - sql=' __dbt__cte__ephemeral_level_two as (\n\nselect * from "dbt"."test16873757769710148165_test_ephemeral"."source_table"\n)', - ) - ] - expected_sql = """with __dbt__cte__ephemeral_level_two as ( - select * from "dbt"."test16873757769710148165_test_ephemeral"."source_table" - ) select * from __dbt__cte__ephemeral_level_two""" - - generated_sql = 
inject_ctes_into_sql(starting_sql, ctes) - assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) - - -def test_inject_ctes_multiple_ctes(): - - starting_sql = "select * from __dbt__cte__ephemeral" - ctes = [ - InjectedCTE( - id="model.test.ephemeral_level_two", - sql=' __dbt__cte__ephemeral_level_two as (\n\nselect * from "dbt"."test16873735573223965828_test_ephemeral"."source_table"\n)', - ), - InjectedCTE( - id="model.test.ephemeral", - sql=" __dbt__cte__ephemeral as (\n\nselect * from __dbt__cte__ephemeral_level_two\n)", - ), - ] - expected_sql = """with __dbt__cte__ephemeral_level_two as ( - select * from "dbt"."test16873735573223965828_test_ephemeral"."source_table" - ), __dbt__cte__ephemeral as ( - select * from __dbt__cte__ephemeral_level_two - ) select * from __dbt__cte__ephemeral""" - - generated_sql = inject_ctes_into_sql(starting_sql, ctes) - assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) - - -def test_inject_ctes_multiple_ctes_more_complex(): - starting_sql = """select * from __dbt__cte__female_only - union all - select * from "dbt"."test16873757723266827902_test_ephemeral"."double_dependent" where gender = 'Male'""" - ctes = [ - InjectedCTE( - id="model.test.base", - sql=" __dbt__cte__base as (\n\n\nselect * from test16873757723266827902_test_ephemeral.seed\n)", - ), - InjectedCTE( - id="model.test.base_copy", - sql=" __dbt__cte__base_copy as (\n\n\nselect * from __dbt__cte__base\n)", - ), - InjectedCTE( - id="model.test.female_only", - sql=" __dbt__cte__female_only as (\n\n\nselect * from __dbt__cte__base_copy where gender = 'Female'\n)", - ), - ] - expected_sql = """with __dbt__cte__base as ( - select * from test16873757723266827902_test_ephemeral.seed - ), __dbt__cte__base_copy as ( - select * from __dbt__cte__base - ), __dbt__cte__female_only as ( - select * from __dbt__cte__base_copy where gender = 'Female' - ) select * from __dbt__cte__female_only - union all - select * from 
"dbt"."test16873757723266827902_test_ephemeral"."double_dependent" where gender = 'Male'""" - - generated_sql = inject_ctes_into_sql(starting_sql, ctes) - assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) - - -def test_inject_ctes_starting_with1(): - starting_sql = """ - with internal_cte as (select * from sessions) - select * from internal_cte - """ - ctes = [ - InjectedCTE( - id="cte_id_1", - sql="__dbt__cte__ephemeral as (select * from table)", - ), - InjectedCTE( - id="cte_id_2", - sql="__dbt__cte__events as (select id, type from events)", - ), - ] - expected_sql = """with __dbt__cte__ephemeral as (select * from table), - __dbt__cte__events as (select id, type from events), - internal_cte as (select * from sessions) - select * from internal_cte""" - - generated_sql = inject_ctes_into_sql(starting_sql, ctes) - assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) - - -def test_inject_ctes_starting_with2(): - starting_sql = """with my_other_cool_cte as ( - select id, name from __dbt__cte__ephemeral - where id > 1000 - ) - select name, id from my_other_cool_cte""" - ctes = [ - InjectedCTE( - id="model.singular_tests_ephemeral.ephemeral", - sql=' __dbt__cte__ephemeral as (\n\n\nwith my_cool_cte as (\n select name, id from "dbt"."test16873917221900185954_test_singular_tests_ephemeral"."base"\n)\nselect id, name from my_cool_cte where id is not null\n)', - ) - ] - expected_sql = """with __dbt__cte__ephemeral as ( - with my_cool_cte as ( - select name, id from "dbt"."test16873917221900185954_test_singular_tests_ephemeral"."base" - ) - select id, name from my_cool_cte where id is not null - ), my_other_cool_cte as ( - select id, name from __dbt__cte__ephemeral - where id > 1000 - ) - select name, id from my_other_cool_cte""" - - generated_sql = inject_ctes_into_sql(starting_sql, ctes) - assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) - - -def test_inject_ctes_comment_with(): - # Test injection with a comment 
containing "with" - starting_sql = """ - --- This is sql with a comment - select * from __dbt__cte__base - """ - ctes = [ - InjectedCTE( - id="model.test.base", - sql=" __dbt__cte__base as (\n\n\nselect * from test16873767336887004702_test_ephemeral.seed\n)", - ) - ] - expected_sql = """with __dbt__cte__base as ( - select * from test16873767336887004702_test_ephemeral.seed - ) --- This is sql with a comment - select * from __dbt__cte__base""" - - generated_sql = inject_ctes_into_sql(starting_sql, ctes) - assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) - - -def test_inject_ctes_with_recursive(): - # Test injection with "recursive" keyword - starting_sql = """ - with recursive t(n) as ( - select * from __dbt__cte__first_ephemeral_model - union all - select n+1 from t where n < 100 - ) - select sum(n) from t - """ - ctes = [ - InjectedCTE( - id="model.test.first_ephemeral_model", - sql=" __dbt__cte__first_ephemeral_model as (\n\nselect 1 as fun\n)", - ) - ] - expected_sql = """with recursive __dbt__cte__first_ephemeral_model as ( - select 1 as fun - ), t(n) as ( - select * from __dbt__cte__first_ephemeral_model - union all - select n+1 from t where n < 100 - ) - select sum(n) from t - """ - generated_sql = inject_ctes_into_sql(starting_sql, ctes) - assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) diff --git a/tests/unit/test_deprecations.py b/tests/unit/test_internal_deprecations.py similarity index 74% rename from tests/unit/test_deprecations.py rename to tests/unit/test_internal_deprecations.py index ca8b8006cbc..69d30132ef4 100644 --- a/tests/unit/test_deprecations.py +++ b/tests/unit/test_internal_deprecations.py @@ -1,6 +1,4 @@ from dbt.internal_deprecations import deprecated -from dbt.flags import set_from_args -from argparse import Namespace @deprecated(reason="just because", version="1.23.0", suggested_action="Make some updates") @@ -10,6 +8,5 @@ def to_be_decorated(): # simple test that the return value is not 
modified def test_deprecated_func(): - set_from_args(Namespace(WARN_ERROR=False), None) assert hasattr(to_be_decorated, "__wrapped__") assert to_be_decorated() == 5 diff --git a/tests/unit/test_manifest_selectors.py b/tests/unit/test_manifest_selectors.py deleted file mode 100644 index f9b7ff279f9..00000000000 --- a/tests/unit/test_manifest_selectors.py +++ /dev/null @@ -1,199 +0,0 @@ -import textwrap -import yaml -from collections import OrderedDict -import unittest -from dbt.config.selectors import SelectorDict -from dbt.exceptions import DbtSelectorsError - - -def get_selector_dict(txt: str) -> OrderedDict: - txt = textwrap.dedent(txt) - dct = OrderedDict(yaml.safe_load(txt)) - return dct - - -class SelectorUnitTest(unittest.TestCase): - def test_compare_cli_non_cli(self): - dct = get_selector_dict( - """\ - selectors: - - name: nightly_diet_snowplow - description: "This uses more CLI-style syntax" - definition: - union: - - intersection: - - '@source:snowplow' - - 'tag:nightly' - - 'models/export' - - exclude: - - intersection: - - 'package:snowplow' - - 'config.materialized:incremental' - - export_performance_timing - - name: nightly_diet_snowplow_full - description: "This is a fuller YAML specification" - definition: - union: - - intersection: - - method: source - value: snowplow - childrens_parents: true - - method: tag - value: nightly - - method: path - value: models/export - - exclude: - - intersection: - - method: package - value: snowplow - - method: config.materialized - value: incremental - - method: fqn - value: export_performance_timing - """ - ) - - sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) - assert sel_dict - with_strings = sel_dict["nightly_diet_snowplow"]["definition"] - no_strings = sel_dict["nightly_diet_snowplow_full"]["definition"] - self.assertEqual(with_strings, no_strings) - - def test_single_string_definition(self): - dct = get_selector_dict( - """\ - selectors: - - name: nightly_selector - definition: - 
'tag:nightly' - """ - ) - - sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) - assert sel_dict - expected = {"method": "tag", "value": "nightly"} - definition = sel_dict["nightly_selector"]["definition"] - self.assertEqual(expected, definition) - - def test_single_key_value_definition(self): - dct = get_selector_dict( - """\ - selectors: - - name: nightly_selector - definition: - tag: nightly - """ - ) - - sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) - assert sel_dict - expected = {"method": "tag", "value": "nightly"} - definition = sel_dict["nightly_selector"]["definition"] - self.assertEqual(expected, definition) - - def test_parent_definition(self): - dct = get_selector_dict( - """\ - selectors: - - name: kpi_nightly_selector - definition: - '+exposure:kpi_nightly' - """ - ) - - sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) - assert sel_dict - expected = {"method": "exposure", "value": "kpi_nightly", "parents": True} - definition = sel_dict["kpi_nightly_selector"]["definition"] - self.assertEqual(expected, definition) - - def test_plus_definition(self): - dct = get_selector_dict( - """\ - selectors: - - name: my_model_children_selector - definition: - 'my_model+2' - """ - ) - - sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) - assert sel_dict - expected = {"method": "fqn", "value": "my_model", "children": True, "children_depth": "2"} - definition = sel_dict["my_model_children_selector"]["definition"] - self.assertEqual(expected, definition) - - def test_selector_definition(self): - dct = get_selector_dict( - """\ - selectors: - - name: default - definition: - union: - - intersection: - - tag: foo - - tag: bar - - name: inherited - definition: - method: selector - value: default - """ - ) - - sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) - assert sel_dict - definition = sel_dict["default"]["definition"] - expected = sel_dict["inherited"]["definition"] - 
self.assertEqual(expected, definition) - - def test_selector_definition_with_exclusion(self): - dct = get_selector_dict( - """\ - selectors: - - name: default - definition: - union: - - intersection: - - tag: foo - - tag: bar - - name: inherited - definition: - union: - - method: selector - value: default - - exclude: - - tag: bar - - name: comparison - definition: - union: - - union: - - intersection: - - tag: foo - - tag: bar - - exclude: - - tag: bar - """ - ) - - sel_dict = SelectorDict.parse_from_selectors_list((dct["selectors"])) - assert sel_dict - definition = sel_dict["inherited"]["definition"] - expected = sel_dict["comparison"]["definition"] - self.assertEqual(expected, definition) - - def test_missing_selector(self): - dct = get_selector_dict( - """\ - selectors: - - name: inherited - definition: - method: selector - value: default - """ - ) - with self.assertRaises(DbtSelectorsError) as err: - SelectorDict.parse_from_selectors_list((dct["selectors"])) - - self.assertEqual( - "Existing selector definition for default not found.", str(err.exception.msg) - ) diff --git a/tests/unit/test_node_types.py b/tests/unit/test_node_types.py index 9611429a934..87bbf51e3a1 100644 --- a/tests/unit/test_node_types.py +++ b/tests/unit/test_node_types.py @@ -1,4 +1,5 @@ import pytest + from dbt.node_types import NodeType node_type_pluralizations = { diff --git a/tests/unit/test_partial_parsing.py b/tests/unit/test_partial_parsing.py deleted file mode 100644 index beac86abe38..00000000000 --- a/tests/unit/test_partial_parsing.py +++ /dev/null @@ -1,198 +0,0 @@ -import unittest -import time - -from dbt.parser.partial import PartialParsing -from dbt.contracts.graph.manifest import Manifest -from dbt.contracts.graph.nodes import ModelNode -from dbt.contracts.files import ParseFileType, SourceFile, SchemaSourceFile, FilePath, FileHash -from dbt.node_types import NodeType -from dbt.tests.util import safe_set_invocation_context -from .utils import normalize - - -class 
TestPartialParsing(unittest.TestCase): - def setUp(self): - - safe_set_invocation_context() - - project_name = "my_test" - project_root = "/users/root" - sql_model_file = SourceFile( - path=FilePath( - project_root=project_root, - searched_path="models", - relative_path="my_model.sql", - modification_time=time.time(), - ), - checksum=FileHash.from_contents("abcdef"), - project_name=project_name, - parse_file_type=ParseFileType.Model, - nodes=["model.my_test.my_model"], - env_vars=[], - ) - sql_model_file_untouched = SourceFile( - path=FilePath( - project_root=project_root, - searched_path="models", - relative_path="my_model_untouched.sql", - modification_time=time.time(), - ), - checksum=FileHash.from_contents("abcdef"), - project_name=project_name, - parse_file_type=ParseFileType.Model, - nodes=["model.my_test.my_model_untouched"], - env_vars=[], - ) - - python_model_file = SourceFile( - path=FilePath( - project_root=project_root, - searched_path="models", - relative_path="python_model.py", - modification_time=time.time(), - ), - checksum=FileHash.from_contents("lalala"), - project_name=project_name, - parse_file_type=ParseFileType.Model, - nodes=["model.my_test.python_model"], - env_vars=[], - ) - python_model_file_untouched = SourceFile( - path=FilePath( - project_root=project_root, - searched_path="models", - relative_path="python_model_untouched.py", - modification_time=time.time(), - ), - checksum=FileHash.from_contents("lalala"), - project_name=project_name, - parse_file_type=ParseFileType.Model, - nodes=["model.my_test.python_model_untouched"], - env_vars=[], - ) - schema_file = SchemaSourceFile( - path=FilePath( - project_root=project_root, - searched_path="models", - relative_path="schema.yml", - modification_time=time.time(), - ), - checksum=FileHash.from_contents("ghijkl"), - project_name=project_name, - parse_file_type=ParseFileType.Schema, - dfy={ - "version": 2, - "models": [ - {"name": "my_model", "description": "Test model"}, - {"name": 
"python_model", "description": "python"}, - ], - }, - ndp=["model.my_test.my_model"], - env_vars={}, - ) - self.saved_files = { - schema_file.file_id: schema_file, - sql_model_file.file_id: sql_model_file, - python_model_file.file_id: python_model_file, - sql_model_file_untouched.file_id: sql_model_file_untouched, - python_model_file_untouched.file_id: python_model_file_untouched, - } - sql_model_node = self.get_model("my_model") - sql_model_node_untouched = self.get_model("my_model_untouched") - python_model_node = self.get_python_model("python_model") - python_model_node_untouched = self.get_python_model("python_model_untouched") - nodes = { - sql_model_node.unique_id: sql_model_node, - python_model_node.unique_id: python_model_node, - sql_model_node_untouched.unique_id: sql_model_node_untouched, - python_model_node_untouched.unique_id: python_model_node_untouched, - } - self.saved_manifest = Manifest(files=self.saved_files, nodes=nodes) - self.new_files = { - sql_model_file.file_id: SourceFile.from_dict(sql_model_file.to_dict()), - python_model_file.file_id: SourceFile.from_dict(python_model_file.to_dict()), - sql_model_file_untouched.file_id: SourceFile.from_dict( - sql_model_file_untouched.to_dict() - ), - python_model_file_untouched.file_id: SourceFile.from_dict( - python_model_file_untouched.to_dict() - ), - schema_file.file_id: SchemaSourceFile.from_dict(schema_file.to_dict()), - } - - self.partial_parsing = PartialParsing(self.saved_manifest, self.new_files) - - def get_model(self, name): - return ModelNode( - package_name="my_test", - path=f"{name}.sql", - original_file_path=f"models/{name}.sql", - language="sql", - raw_code="select * from wherever", - name=name, - resource_type=NodeType.Model, - unique_id=f"model.my_test.{name}", - fqn=["my_test", "models", name], - database="test_db", - schema="test_schema", - alias="bar", - checksum=FileHash.from_contents(""), - patch_path="my_test://" + normalize("models/schema.yml"), - ) - - def 
get_python_model(self, name): - return ModelNode( - package_name="my_test", - path=f"{name}.py", - original_file_path=f"models/{name}.py", - raw_code="import something", - language="python", - name=name, - resource_type=NodeType.Model, - unique_id=f"model.my_test.{name}", - fqn=["my_test", "models", name], - database="test_db", - schema="test_schema", - alias="bar", - checksum=FileHash.from_contents(""), - patch_path="my_test://" + normalize("models/schema.yml"), - ) - - def test_simple(self): - # Nothing has changed - self.assertIsNotNone(self.partial_parsing) - self.assertTrue(self.partial_parsing.skip_parsing()) - - # Change a model file - sql_model_file_id = "my_test://" + normalize("models/my_model.sql") - self.partial_parsing.new_files[sql_model_file_id].checksum = FileHash.from_contents( - "xyzabc" - ) - - python_model_file_id = "my_test://" + normalize("models/python_model.py") - self.partial_parsing.new_files[python_model_file_id].checksum = FileHash.from_contents( - "ohohoh" - ) - - self.partial_parsing.build_file_diff() - self.assertFalse(self.partial_parsing.skip_parsing()) - pp_files = self.partial_parsing.get_parsing_files() - pp_files["my_test"]["ModelParser"] = set(pp_files["my_test"]["ModelParser"]) - # models has 'patch_path' so we expect to see a SchemaParser file listed - schema_file_id = "my_test://" + normalize("models/schema.yml") - expected_pp_files = { - "my_test": { - "ModelParser": set([sql_model_file_id, python_model_file_id]), - "SchemaParser": [schema_file_id], - } - } - self.assertEqual(pp_files, expected_pp_files) - schema_file = self.saved_files[schema_file_id] - schema_file_model_names = set([model["name"] for model in schema_file.pp_dict["models"]]) - expected_model_names = set(["python_model", "my_model"]) - self.assertEqual(schema_file_model_names, expected_model_names) - schema_file_model_descriptions = set( - [model["description"] for model in schema_file.pp_dict["models"]] - ) - expected_model_descriptions = set(["Test 
model", "python"]) - self.assertEqual(schema_file_model_descriptions, expected_model_descriptions) diff --git a/tests/unit/test_relation.py b/tests/unit/test_relation.py deleted file mode 100644 index aa9cda258f9..00000000000 --- a/tests/unit/test_relation.py +++ /dev/null @@ -1,68 +0,0 @@ -from dataclasses import replace - -import pytest - -from dbt.adapters.base import BaseRelation -from dbt.adapters.contracts.relation import RelationType - - -@pytest.mark.parametrize( - "relation_type,result", - [ - (RelationType.View, True), - (RelationType.External, False), - ], -) -def test_can_be_renamed(relation_type, result): - my_relation = BaseRelation.create(type=relation_type) - my_relation = replace(my_relation, renameable_relations=frozenset({RelationType.View})) - assert my_relation.can_be_renamed is result - - -def test_can_be_renamed_default(): - my_relation = BaseRelation.create(type=RelationType.View) - assert my_relation.can_be_renamed is False - - -@pytest.mark.parametrize( - "relation_type,result", - [ - (RelationType.View, True), - (RelationType.External, False), - ], -) -def test_can_be_replaced(relation_type, result): - my_relation = BaseRelation.create(type=relation_type) - my_relation = replace(my_relation, replaceable_relations=frozenset({RelationType.View})) - assert my_relation.can_be_replaced is result - - -def test_can_be_replaced_default(): - my_relation = BaseRelation.create(type=RelationType.View) - assert my_relation.can_be_replaced is False - - -@pytest.mark.parametrize( - "limit,expected_result", - [ - (None, '"test_database"."test_schema"."test_identifier"'), - ( - 0, - '(select * from "test_database"."test_schema"."test_identifier" where false limit 0) _dbt_limit_subq', - ), - ( - 1, - '(select * from "test_database"."test_schema"."test_identifier" limit 1) _dbt_limit_subq', - ), - ], -) -def test_render_limited(limit, expected_result): - my_relation = BaseRelation.create( - database="test_database", - schema="test_schema", - 
identifier="test_identifier", - limit=limit, - ) - actual_result = my_relation.render_limited() - assert actual_result == expected_result - assert str(my_relation) == expected_result diff --git a/tests/unit/test_semantic_layer_nodes_satisfy_protocols.py b/tests/unit/test_semantic_layer_nodes_satisfy_protocols.py index e2765499355..99d364b5da4 100644 --- a/tests/unit/test_semantic_layer_nodes_satisfy_protocols.py +++ b/tests/unit/test_semantic_layer_nodes_satisfy_protocols.py @@ -1,11 +1,25 @@ -import pytest import copy +from typing import Protocol, runtime_checkable -from dbt.contracts.graph.nodes import ( - Metric, - SavedQuery, - SemanticModel, +import pytest +from dbt_semantic_interfaces.protocols import WhereFilter as WhereFilterProtocol +from dbt_semantic_interfaces.protocols import dimension as DimensionProtocols +from dbt_semantic_interfaces.protocols import entity as EntityProtocols +from dbt_semantic_interfaces.protocols import measure as MeasureProtocols +from dbt_semantic_interfaces.protocols import metadata as MetadataProtocols +from dbt_semantic_interfaces.protocols import metric as MetricProtocols +from dbt_semantic_interfaces.protocols import saved_query as SavedQueryProtocols +from dbt_semantic_interfaces.protocols import semantic_model as SemanticModelProtocols +from dbt_semantic_interfaces.type_enums import ( + AggregationType, + DimensionType, + EntityType, + MetricType, + TimeGranularity, ) +from hypothesis import given +from hypothesis.strategies import builds, none, text + from dbt.artifacts.resources import ( ConstantPropertyInput, ConversionTypeParams, @@ -26,27 +40,8 @@ SourceFileMetadata, WhereFilter, ) +from dbt.contracts.graph.nodes import Metric, SavedQuery, SemanticModel from dbt.node_types import NodeType -from dbt_semantic_interfaces.protocols import ( - dimension as DimensionProtocols, - entity as EntityProtocols, - measure as MeasureProtocols, - metadata as MetadataProtocols, - metric as MetricProtocols, - saved_query as 
SavedQueryProtocols, - semantic_model as SemanticModelProtocols, - WhereFilter as WhereFilterProtocol, -) -from dbt_semantic_interfaces.type_enums import ( - AggregationType, - DimensionType, - EntityType, - MetricType, - TimeGranularity, -) -from hypothesis import given -from hypothesis.strategies import builds, none, text -from typing import Protocol, runtime_checkable @runtime_checkable diff --git a/tests/unit/test_semantic_models.py b/tests/unit/test_semantic_models.py deleted file mode 100644 index 3bd79a679e9..00000000000 --- a/tests/unit/test_semantic_models.py +++ /dev/null @@ -1,106 +0,0 @@ -from copy import deepcopy -import pytest -from typing import List - -from dbt.artifacts.resources import Dimension, Entity, Measure, Defaults -from dbt.contracts.graph.nodes import SemanticModel -from dbt.artifacts.resources.v1.semantic_model import NodeRelation -from dbt.node_types import NodeType -from dbt_semantic_interfaces.references import MeasureReference -from dbt_semantic_interfaces.type_enums import AggregationType, DimensionType, EntityType - - -@pytest.fixture(scope="function") -def dimensions() -> List[Dimension]: - return [Dimension(name="ds", type=DimensionType)] - - -@pytest.fixture(scope="function") -def entities() -> List[Entity]: - return [Entity(name="test_entity", type=EntityType.PRIMARY, expr="id")] - - -@pytest.fixture(scope="function") -def measures() -> List[Measure]: - return [Measure(name="test_measure", agg=AggregationType.COUNT, expr="id")] - - -@pytest.fixture(scope="function") -def default_semantic_model( - dimensions: List[Dimension], entities: List[Entity], measures: List[Measure] -) -> SemanticModel: - return SemanticModel( - name="test_semantic_model", - resource_type=NodeType.SemanticModel, - model="ref('test_model')", - package_name="test", - path="test_path", - original_file_path="test_fixture", - unique_id=f"{NodeType.SemanticModel}.test.test_semantic_model", - fqn=[], - defaults=Defaults(agg_time_dimension="ds"), - 
dimensions=dimensions, - entities=entities, - measures=measures, - node_relation=NodeRelation( - alias="test_alias", schema_name="test_schema", database="test_database" - ), - ) - - -def test_checked_agg_time_dimension_for_measure_via_defaults( - default_semantic_model: SemanticModel, -): - assert default_semantic_model.defaults.agg_time_dimension is not None - measure = default_semantic_model.measures[0] - measure.agg_time_dimension = None - default_semantic_model.checked_agg_time_dimension_for_measure( - MeasureReference(element_name=measure.name) - ) - - -def test_checked_agg_time_dimension_for_measure_via_measure(default_semantic_model: SemanticModel): - default_semantic_model.defaults = None - measure = default_semantic_model.measures[0] - measure.agg_time_dimension = default_semantic_model.dimensions[0].name - default_semantic_model.checked_agg_time_dimension_for_measure( - MeasureReference(element_name=measure.name) - ) - - -def test_checked_agg_time_dimension_for_measure_exception(default_semantic_model: SemanticModel): - default_semantic_model.defaults = None - measure = default_semantic_model.measures[0] - measure.agg_time_dimension = None - - with pytest.raises(AssertionError) as execinfo: - default_semantic_model.checked_agg_time_dimension_for_measure( - MeasureReference(measure.name) - ) - - assert ( - f"Aggregation time dimension for measure {measure.name} on semantic model {default_semantic_model.name}" - in str(execinfo.value) - ) - - -def test_semantic_model_same_contents(default_semantic_model: SemanticModel): - default_semantic_model_copy = deepcopy(default_semantic_model) - - assert default_semantic_model.same_contents(default_semantic_model_copy) - - -def test_semantic_model_same_contents_update_model(default_semantic_model: SemanticModel): - default_semantic_model_copy = deepcopy(default_semantic_model) - default_semantic_model_copy.model = "ref('test_another_model')" - - assert not 
default_semantic_model.same_contents(default_semantic_model_copy) - - -def test_semantic_model_same_contents_different_node_relation( - default_semantic_model: SemanticModel, -): - default_semantic_model_copy = deepcopy(default_semantic_model) - default_semantic_model_copy.node_relation.alias = "test_another_alias" - # Relation should not be consided in same_contents - assert default_semantic_model.same_contents(default_semantic_model_copy) diff --git a/tests/unit/test_semver.py b/tests/unit/test_semver.py deleted file mode 100644 index deb155586b6..00000000000 --- a/tests/unit/test_semver.py +++ /dev/null @@ -1,298 +0,0 @@ -import unittest -import itertools - -from typing import List -from dbt_common.exceptions import VersionsNotCompatibleError -from dbt_common.semver import ( - VersionSpecifier, - UnboundedVersionSpecifier, - VersionRange, - reduce_versions, - versions_compatible, - resolve_to_specific_version, - filter_installable, -) - - -def semver_regex_versioning(versions: List[str]) -> bool: - for version_string in versions: - try: - VersionSpecifier.from_version_string(version_string) - except Exception: - return False - return True - - -def create_range(start_version_string, end_version_string): - start = UnboundedVersionSpecifier() - end = UnboundedVersionSpecifier() - - if start_version_string is not None: - start = VersionSpecifier.from_version_string(start_version_string) - - if end_version_string is not None: - end = VersionSpecifier.from_version_string(end_version_string) - - return VersionRange(start=start, end=end) - - -class TestSemver(unittest.TestCase): - def assertVersionSetResult(self, inputs, output_range): - expected = create_range(*output_range) - - for permutation in itertools.permutations(inputs): - self.assertEqual(reduce_versions(*permutation), expected) - - def assertInvalidVersionSet(self, inputs): - for permutation in itertools.permutations(inputs): - with self.assertRaises(VersionsNotCompatibleError): - 
reduce_versions(*permutation) - - def test__versions_compatible(self): - self.assertTrue(versions_compatible("0.0.1", "0.0.1")) - self.assertFalse(versions_compatible("0.0.1", "0.0.2")) - self.assertTrue(versions_compatible(">0.0.1", "0.0.2")) - self.assertFalse(versions_compatible("0.4.5a1", "0.4.5a2")) - - def test__semver_regex_versions(self): - self.assertTrue( - semver_regex_versioning( - [ - "0.0.4", - "1.2.3", - "10.20.30", - "1.1.2-prerelease+meta", - "1.1.2+meta", - "1.1.2+meta-valid", - "1.0.0-alpha", - "1.0.0-beta", - "1.0.0-alpha.beta", - "1.0.0-alpha.beta.1", - "1.0.0-alpha.1", - "1.0.0-alpha0.valid", - "1.0.0-alpha.0valid", - "1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay", - "1.0.0-rc.1+build.1", - "2.0.0-rc.1+build.123", - "1.2.3-beta", - "10.2.3-DEV-SNAPSHOT", - "1.2.3-SNAPSHOT-123", - "1.0.0", - "2.0.0", - "1.1.7", - "2.0.0+build.1848", - "2.0.1-alpha.1227", - "1.0.0-alpha+beta", - "1.2.3----RC-SNAPSHOT.12.9.1--.12+788", - "1.2.3----R-S.12.9.1--.12+meta", - "1.2.3----RC-SNAPSHOT.12.9.1--.12", - "1.0.0+0.build.1-rc.10000aaa-kk-0.1", - "99999999999999999999999.999999999999999999.99999999999999999", - "1.0.0-0A.is.legal", - ] - ) - ) - - self.assertFalse( - semver_regex_versioning( - [ - "1", - "1.2", - "1.2.3-0123", - "1.2.3-0123.0123", - "1.1.2+.123", - "+invalid", - "-invalid", - "-invalid+invalid", - "-invalid.01", - "alpha", - "alpha.beta", - "alpha.beta.1", - "alpha.1", - "alpha+beta", - "alpha_beta", - "alpha.", - "alpha..", - "beta", - "1.0.0-alpha_beta", - "-alpha.", - "1.0.0-alpha..", - "1.0.0-alpha..1", - "1.0.0-alpha...1", - "1.0.0-alpha....1", - "1.0.0-alpha.....1", - "1.0.0-alpha......1", - "1.0.0-alpha.......1", - "01.1.1", - "1.01.1", - "1.1.01", - "1.2", - "1.2.3.DEV", - "1.2-SNAPSHOT", - "1.2.31.2.3----RC-SNAPSHOT.12.09.1--..12+788", - "1.2-RC-SNAPSHOT", - "-1.0.3-gamma+b7718", - "+justmeta", - "9.8.7+meta+meta", - "9.8.7-whatever+meta+meta", - 
"99999999999999999999999.999999999999999999.99999999999999999----RC-SNAPSHOT.12.09.1--------------------------------..12", - ] - ) - ) - - def test__reduce_versions(self): - self.assertVersionSetResult(["0.0.1", "0.0.1"], ["=0.0.1", "=0.0.1"]) - - self.assertVersionSetResult(["0.0.1"], ["=0.0.1", "=0.0.1"]) - - self.assertVersionSetResult([">0.0.1"], [">0.0.1", None]) - - self.assertVersionSetResult(["<0.0.1"], [None, "<0.0.1"]) - - self.assertVersionSetResult([">0.0.1", "0.0.2"], ["=0.0.2", "=0.0.2"]) - - self.assertVersionSetResult(["0.0.2", ">=0.0.2"], ["=0.0.2", "=0.0.2"]) - - self.assertVersionSetResult([">0.0.1", ">0.0.2", ">0.0.3"], [">0.0.3", None]) - - self.assertVersionSetResult([">0.0.1", "<0.0.3"], [">0.0.1", "<0.0.3"]) - - self.assertVersionSetResult([">0.0.1", "0.0.2", "<0.0.3"], ["=0.0.2", "=0.0.2"]) - - self.assertVersionSetResult([">0.0.1", ">=0.0.1", "<0.0.3"], [">0.0.1", "<0.0.3"]) - - self.assertVersionSetResult([">0.0.1", "<0.0.3", "<=0.0.3"], [">0.0.1", "<0.0.3"]) - - self.assertVersionSetResult([">0.0.1", ">0.0.2", "<0.0.3", "<0.0.4"], [">0.0.2", "<0.0.3"]) - - self.assertVersionSetResult(["<=0.0.3", ">=0.0.3"], [">=0.0.3", "<=0.0.3"]) - - self.assertInvalidVersionSet([">0.0.2", "0.0.1"]) - self.assertInvalidVersionSet([">0.0.2", "0.0.2"]) - self.assertInvalidVersionSet(["<0.0.2", "0.0.2"]) - self.assertInvalidVersionSet(["<0.0.2", ">0.0.3"]) - self.assertInvalidVersionSet(["<=0.0.3", ">0.0.3"]) - self.assertInvalidVersionSet(["<0.0.3", ">=0.0.3"]) - self.assertInvalidVersionSet(["<0.0.3", ">0.0.3"]) - - def test__resolve_to_specific_version(self): - self.assertEqual( - resolve_to_specific_version(create_range(">0.0.1", None), ["0.0.1", "0.0.2"]), "0.0.2" - ) - - self.assertEqual( - resolve_to_specific_version(create_range(">=0.0.2", None), ["0.0.1", "0.0.2"]), "0.0.2" - ) - - self.assertEqual( - resolve_to_specific_version(create_range(">=0.0.3", None), ["0.0.1", "0.0.2"]), None - ) - - self.assertEqual( - resolve_to_specific_version( - 
create_range(">=0.0.3", "<0.0.5"), ["0.0.3", "0.0.4", "0.0.5"] - ), - "0.0.4", - ) - - self.assertEqual( - resolve_to_specific_version( - create_range(None, "<=0.0.5"), ["0.0.3", "0.1.4", "0.0.5"] - ), - "0.0.5", - ) - - self.assertEqual( - resolve_to_specific_version( - create_range("=0.4.5a2", "=0.4.5a2"), ["0.4.5a1", "0.4.5a2"] - ), - "0.4.5a2", - ) - - self.assertEqual( - resolve_to_specific_version(create_range("=0.7.6", "=0.7.6"), ["0.7.6-b1", "0.7.6"]), - "0.7.6", - ) - - self.assertEqual( - resolve_to_specific_version( - create_range(">=1.0.0", None), ["1.0.0", "1.1.0a1", "1.1.0", "1.2.0a1"] - ), - "1.2.0a1", - ) - - self.assertEqual( - resolve_to_specific_version( - create_range(">=1.0.0", "<1.2.0"), ["1.0.0", "1.1.0a1", "1.1.0", "1.2.0a1"] - ), - "1.1.0", - ) - - self.assertEqual( - resolve_to_specific_version( - create_range(">=1.0.0", None), ["1.0.0", "1.1.0a1", "1.1.0", "1.2.0a1", "1.2.0"] - ), - "1.2.0", - ) - - self.assertEqual( - resolve_to_specific_version( - create_range(">=1.0.0", "<1.2.0"), - ["1.0.0", "1.1.0a1", "1.1.0", "1.2.0a1", "1.2.0"], - ), - "1.1.0", - ) - - self.assertEqual( - resolve_to_specific_version( - # https://github.com/dbt-labs/dbt-core/issues/7039 - # 10 is greater than 9 - create_range(">0.9.0", "<0.10.0"), - ["0.9.0", "0.9.1", "0.10.0"], - ), - "0.9.1", - ) - - def test__filter_installable(self): - installable = filter_installable( - [ - "1.1.0", - "1.2.0a1", - "1.0.0", - "2.1.0-alpha", - "2.2.0asdf", - "2.1.0", - "2.2.0", - "2.2.0-fishtown-beta", - "2.2.0-2", - ], - install_prerelease=True, - ) - expected = [ - "1.0.0", - "1.1.0", - "1.2.0a1", - "2.1.0-alpha", - "2.1.0", - "2.2.0-2", - "2.2.0asdf", - "2.2.0-fishtown-beta", - "2.2.0", - ] - assert installable == expected - - installable = filter_installable( - [ - "1.1.0", - "1.2.0a1", - "1.0.0", - "2.1.0-alpha", - "2.2.0asdf", - "2.1.0", - "2.2.0", - "2.2.0-fishtown-beta", - ], - install_prerelease=False, - ) - expected = ["1.0.0", "1.1.0", "2.1.0", "2.2.0"] - assert 
installable == expected diff --git a/tests/unit/test_sql_result.py b/tests/unit/test_sql_result.py deleted file mode 100644 index f7273acac2e..00000000000 --- a/tests/unit/test_sql_result.py +++ /dev/null @@ -1,19 +0,0 @@ -import unittest -from dbt.adapters.sql.connections import SQLConnectionManager - - -class TestProcessSQLResult(unittest.TestCase): - def test_duplicated_columns(self): - cols_with_one_dupe = ["a", "b", "a", "d"] - rows = [(1, 2, 3, 4)] - self.assertEqual( - SQLConnectionManager.process_results(cols_with_one_dupe, rows), - [{"a": 1, "b": 2, "a_2": 3, "d": 4}], - ) - - cols_with_more_dupes = ["a", "a", "a", "b"] - rows = [(1, 2, 3, 4)] - self.assertEqual( - SQLConnectionManager.process_results(cols_with_more_dupes, rows), - [{"a": 1, "a_2": 2, "a_3": 3, "b": 4}], - ) diff --git a/tests/unit/test_tracking.py b/tests/unit/test_tracking.py index accfa99bc3f..685f1108c1b 100644 --- a/tests/unit/test_tracking.py +++ b/tests/unit/test_tracking.py @@ -1,22 +1,25 @@ -import dbt.tracking import datetime -import shutil import tempfile -import unittest + +import pytest + +import dbt.tracking -class TestTracking(unittest.TestCase): - def setUp(self): - dbt.tracking.active_user = None - self.tempdir = tempfile.mkdtemp() +@pytest.fixture(scope="function") +def active_user_none() -> None: + dbt.tracking.active_user = None - def tearDown(self): - dbt.tracking.active_user = None - shutil.rmtree(self.tempdir) - def test_tracking_initial(self): +@pytest.fixture(scope="function") +def tempdir(active_user_none) -> str: + return tempfile.mkdtemp() + + +class TestTracking: + def test_tracking_initial(self, tempdir): assert dbt.tracking.active_user is None - dbt.tracking.initialize_from_flags(True, self.tempdir) + dbt.tracking.initialize_from_flags(True, tempdir) assert isinstance(dbt.tracking.active_user, dbt.tracking.User) invocation_id = dbt.tracking.active_user.invocation_id @@ -48,7 +51,7 @@ def test_tracking_initial(self): # if you use `!=`, you might hit a race 
condition (especially on windows) assert dbt.tracking.active_user.run_started_at is not run_started_at - def test_tracking_never_ok(self): + def test_tracking_never_ok(self, active_user_none): assert dbt.tracking.active_user is None # this should generate a whole new user object -> new invocation_id/run_started_at @@ -60,7 +63,7 @@ def test_tracking_never_ok(self): assert isinstance(dbt.tracking.active_user.invocation_id, str) assert isinstance(dbt.tracking.active_user.run_started_at, datetime.datetime) - def test_disable_never_enabled(self): + def test_disable_never_enabled(self, active_user_none): assert dbt.tracking.active_user is None # this should generate a whole new user object -> new invocation_id/run_started_at @@ -72,10 +75,7 @@ def test_disable_never_enabled(self): assert isinstance(dbt.tracking.active_user.invocation_id, str) assert isinstance(dbt.tracking.active_user.run_started_at, datetime.datetime) - def test_initialize_from_flags(self): - for send_anonymous_usage_stats in [True, False]: - with self.subTest(send_anonymous_usage_stats=send_anonymous_usage_stats): - - dbt.tracking.initialize_from_flags(send_anonymous_usage_stats, self.tempdir) - - assert dbt.tracking.active_user.do_not_track != send_anonymous_usage_stats + @pytest.mark.parametrize("send_anonymous_usage_stats", [True, False]) + def test_initialize_from_flags(self, tempdir, send_anonymous_usage_stats): + dbt.tracking.initialize_from_flags(send_anonymous_usage_stats, tempdir) + assert dbt.tracking.active_user.do_not_track != send_anonymous_usage_stats diff --git a/tests/unit/utils/__init__.py b/tests/unit/utils/__init__.py index 608249aa114..411ad6ae756 100644 --- a/tests/unit/utils/__init__.py +++ b/tests/unit/utils/__init__.py @@ -3,15 +3,16 @@ Note that all imports should be inside the functions to avoid import/mocking issues. 
""" -import string import os -from unittest import mock -from unittest import TestCase +import string +from unittest import TestCase, mock import agate import pytest -from dbt_common.dataclass_schema import ValidationError + from dbt.config.project import PartialProject +from dbt.contracts.graph.manifest import Manifest +from dbt_common.dataclass_schema import ValidationError def normalize(path): @@ -76,9 +77,10 @@ def project_from_dict(project, profile, packages=None, selectors=None, cli_vars= def config_from_parts_or_dicts(project, profile, packages=None, selectors=None, cli_vars={}): - from dbt.config import Project, Profile, RuntimeConfig from copy import deepcopy + from dbt.config import Profile, Project, RuntimeConfig + if isinstance(project, Project): profile_name = project.profile_name else: @@ -305,8 +307,8 @@ def MockGenerateMacro(package, component="some_component", **kwargs): def MockSource(package, source_name, name, **kwargs): - from dbt.node_types import NodeType from dbt.contracts.graph.nodes import SourceDefinition + from dbt.node_types import NodeType src = mock.MagicMock( __class__=SourceDefinition, @@ -322,8 +324,8 @@ def MockSource(package, source_name, name, **kwargs): def MockNode(package, name, resource_type=None, **kwargs): - from dbt.node_types import NodeType from dbt.contracts.graph.nodes import ModelNode, SeedNode + from dbt.node_types import NodeType if resource_type is None: resource_type = NodeType.Model @@ -351,8 +353,8 @@ def MockNode(package, name, resource_type=None, **kwargs): def MockDocumentation(package, name, **kwargs): - from dbt.node_types import NodeType from dbt.contracts.graph.nodes import Documentation + from dbt.node_types import NodeType doc = mock.MagicMock( __class__=Documentation, @@ -386,3 +388,17 @@ def replace_config(n, **kwargs): config=n.config.replace(**kwargs), unrendered_config=dict_replace(n.unrendered_config, **kwargs), ) + + +def make_manifest(nodes=[], sources=[], macros=[], docs=[]) -> Manifest: + 
return Manifest( + nodes={n.unique_id: n for n in nodes}, + macros={m.unique_id: m for m in macros}, + sources={s.unique_id: s for s in sources}, + docs={d.unique_id: d for d in docs}, + disabled={}, + files={}, + exposures={}, + metrics={}, + selectors={}, + ) diff --git a/tests/unit/utils/adapter.py b/tests/unit/utils/adapter.py new file mode 100644 index 00000000000..06555b0e400 --- /dev/null +++ b/tests/unit/utils/adapter.py @@ -0,0 +1,21 @@ +from unittest.mock import MagicMock + +import pytest + +from dbt.adapters.postgres import PostgresAdapter +from dbt.adapters.sql import SQLConnectionManager + + +@pytest.fixture +def mock_connection_manager() -> MagicMock: + mock_connection_manager = MagicMock(SQLConnectionManager) + mock_connection_manager.set_query_header = lambda query_header_context: None + return mock_connection_manager + + +@pytest.fixture +def mock_adapter(mock_connection_manager: MagicMock) -> MagicMock: + mock_adapter = MagicMock(PostgresAdapter) + mock_adapter.connections = mock_connection_manager + mock_adapter.clear_macro_resolver = MagicMock() + return mock_adapter diff --git a/tests/unit/utils/event_manager.py b/tests/unit/utils/event_manager.py new file mode 100644 index 00000000000..70415e36231 --- /dev/null +++ b/tests/unit/utils/event_manager.py @@ -0,0 +1,8 @@ +import pytest + +from dbt_common.events.event_manager_client import cleanup_event_logger + + +@pytest.fixture(autouse=True) +def always_clean_event_manager() -> None: + cleanup_event_logger() diff --git a/tests/unit/utils/flags.py b/tests/unit/utils/flags.py new file mode 100644 index 00000000000..20bb4a44ea0 --- /dev/null +++ b/tests/unit/utils/flags.py @@ -0,0 +1,33 @@ +import sys +from argparse import Namespace + +if sys.version_info < (3, 9): + from typing import Generator +else: + from collections.abc import Generator + +import pytest + +from dbt.flags import set_from_args + + +@pytest.fixture +def args_for_flags() -> Namespace: + """Defines the namespace args to be used in 
`set_from_args` of `set_test_flags` fixture. + + This fixture is meant to be overrided by tests that need specific flags to be set. + """ + return Namespace() + + +@pytest.fixture(autouse=True) +def set_test_flags(args_for_flags: Namespace) -> Generator[None, None, None]: + """Sets up and tears down the global flags for every pytest unit test + + Override `args_for_flags` fixture as needed to set any specific flags. + """ + set_from_args(args_for_flags, {}) + # fixtures stop setup upon yield + yield None + # everything after yield is run at test teardown + set_from_args(Namespace(), {}) diff --git a/tests/unit/utils/manifest.py b/tests/unit/utils/manifest.py index 2f56570df41..a7c269cdab2 100644 --- a/tests/unit/utils/manifest.py +++ b/tests/unit/utils/manifest.py @@ -1,54 +1,47 @@ -from argparse import Namespace import pytest +from dbt_semantic_interfaces.type_enums import MetricType +from dbt.artifacts.resources import ( + ExposureType, + MacroDependsOn, + MetricInputMeasure, + MetricTypeParams, + NodeRelation, + Owner, + QueryParams, + RefArgs, + TestConfig, + TestMetadata, +) from dbt.artifacts.resources.v1.model import ModelConfig from dbt.contracts.files import FileHash +from dbt.contracts.graph.manifest import Manifest, ManifestMetadata from dbt.contracts.graph.nodes import ( + AccessType, DependsOn, - NodeConfig, - Macro, - ModelNode, Exposure, - Metric, + GenericTestNode, Group, + Macro, + Metric, + ModelNode, + NodeConfig, SavedQuery, SeedNode, SemanticModel, SingularTestNode, - GenericTestNode, SourceDefinition, - AccessType, UnitTestDefinition, ) -from dbt.contracts.graph.manifest import Manifest, ManifestMetadata -from dbt.artifacts.resources import ( - ExposureType, - MetricInputMeasure, - MetricTypeParams, - NodeRelation, - Owner, - QueryParams, - MacroDependsOn, - TestConfig, - TestMetadata, - RefArgs, -) -from dbt.contracts.graph.unparsed import ( - UnitTestInputFixture, - UnitTestOutputFixture, -) +from dbt.contracts.graph.unparsed import 
UnitTestInputFixture, UnitTestOutputFixture from dbt.node_types import NodeType -from dbt_semantic_interfaces.type_enums import MetricType -from dbt.flags import set_from_args - -set_from_args(Namespace(WARN_ERROR=False), None) - def make_model( pkg, name, - sql, + code, + language="sql", refs=None, sources=None, tags=None, @@ -60,6 +53,7 @@ def make_model( version=None, latest_version=None, access=None, + patch_path=None, ): if refs is None: refs = [] @@ -68,7 +62,12 @@ def make_model( if tags is None: tags = [] if path is None: - path = f"{name}.sql" + if language == "sql": + path = f"{name}.sql" + elif language == "python": + path = f"{name}.py" + else: + raise ValueError(f"Unknown language: {language}") if alias is None: alias = name if config_kwargs is None: @@ -96,7 +95,7 @@ def make_model( return ModelNode( language="sql", - raw_code=sql, + raw_code=code, database="dbt", schema="dbt_schema", alias=alias, @@ -119,6 +118,7 @@ def make_model( version=version, latest_version=latest_version, access=access or AccessType.Protected, + patch_path=patch_path, ) @@ -977,6 +977,11 @@ def semantic_models() -> list: return [] +@pytest.fixture +def files() -> dict: + return {} + + @pytest.fixture def manifest( metric, @@ -987,6 +992,7 @@ def manifest( unit_tests, metrics, semantic_models, + files, ) -> Manifest: manifest = Manifest( nodes={n.unique_id: n for n in nodes}, @@ -995,12 +1001,13 @@ def manifest( unit_tests={t.unique_id: t for t in unit_tests}, semantic_models={s.unique_id: s for s in semantic_models}, docs={}, - files={}, + files=files, exposures={}, metrics={m.unique_id: m for m in metrics}, disabled={}, selectors={}, groups={}, - metadata=ManifestMetadata(adapter_type="postgres"), + metadata=ManifestMetadata(adapter_type="postgres", project_name="pkg"), ) + manifest.build_parent_and_child_maps() return manifest diff --git a/tests/unit/utils/project.py b/tests/unit/utils/project.py new file mode 100644 index 00000000000..c7215990e6d --- /dev/null +++ 
b/tests/unit/utils/project.py @@ -0,0 +1,88 @@ +from unittest.mock import MagicMock + +import pytest + +from dbt.adapters.contracts.connection import QueryComment +from dbt.config import RuntimeConfig +from dbt.config.project import Project, RenderComponents, VarProvider +from dbt.config.selectors import SelectorConfig +from dbt.contracts.project import PackageConfig +from dbt_common.semver import VersionSpecifier + + +@pytest.fixture(scope="function") +def selector_config() -> SelectorConfig: + return SelectorConfig.selectors_from_dict( + data={ + "selectors": [ + { + "name": "my_selector", + "definition": "give me cats", + "default": True, + } + ] + } + ) + + +@pytest.fixture(scope="function") +def project(selector_config: SelectorConfig) -> Project: + return Project( + project_name="test_project", + version=1.0, + project_root="doesnt/actually/exist", + profile_name="test_profile", + model_paths=["models"], + macro_paths=["macros"], + seed_paths=["seeds"], + test_paths=["tests"], + analysis_paths=["analyses"], + docs_paths=["docs"], + asset_paths=["assets"], + target_path="target", + snapshot_paths=["snapshots"], + clean_targets=["target"], + log_path="path/to/project/logs", + packages_install_path="dbt_packages", + packages_specified_path="packages.yml", + quoting={"database": True, "schema": True, "identifier": True}, + models={}, + on_run_start=[], + on_run_end=[], + dispatch=[{"macro_namespace": "dbt_utils", "search_order": ["test_project", "dbt_utils"]}], + seeds={}, + snapshots={}, + sources={}, + data_tests={}, + unit_tests={}, + metrics={}, + semantic_models={}, + saved_queries={}, + exposures={}, + vars=VarProvider({}), + dbt_version=[VersionSpecifier.from_version_string("0.0.0")], + packages=PackageConfig([]), + manifest_selectors={}, + selectors=selector_config, + query_comment=QueryComment(), + config_version=1, + unrendered=RenderComponents({}, {}, {}), + project_env_vars={}, + restrict_access=False, + dbt_cloud={}, + ) + + +@pytest.fixture +def 
mock_project(): + mock_project = MagicMock(RuntimeConfig) + mock_project.cli_vars = {} + mock_project.args = MagicMock() + mock_project.args.profile = "test" + mock_project.args.target = "test" + mock_project.project_env_vars = {} + mock_project.profile_env_vars = {} + mock_project.project_target_path = "mock_target_path" + mock_project.credentials = MagicMock() + mock_project.clear_dependencies = MagicMock() + return mock_project diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 00000000000..3f71cac38ef --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,17 @@ +from dataclasses import dataclass, field +from typing import List + +from dbt_common.events.base_types import BaseEvent, EventMsg + + +@dataclass +class EventCatcher: + event_to_catch: BaseEvent + caught_events: List[EventMsg] = field(default_factory=list) + + def catch(self, event: EventMsg): + if event.info.name == self.event_to_catch.__name__: + self.caught_events.append(event) + + def flush(self) -> None: + self.caught_events = [] diff --git a/third-party-stubs/logbook/__init__.pyi b/third-party-stubs/logbook/__init__.pyi deleted file mode 100644 index a0952ff7c5f..00000000000 --- a/third-party-stubs/logbook/__init__.pyi +++ /dev/null @@ -1,65 +0,0 @@ -# Stubs for logbook (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. 
- -from .__version__ import __version__ -from .base import ( - CRITICAL as CRITICAL, - DEBUG as DEBUG, - ERROR as ERROR, - Flags as Flags, - INFO as INFO, - LogRecord as LogRecord, - Logger as Logger, - LoggerGroup as LoggerGroup, - NOTICE as NOTICE, - NOTSET as NOTSET, - NestedSetup as NestedSetup, - Processor as Processor, - TRACE as TRACE, - WARNING as WARNING, - dispatch_record as dispatch_record, - get_level_name as get_level_name, - lookup_level as lookup_level, - set_datetime_format as set_datetime_format, -) -from .handlers import ( - BrotliCompressionHandler as BrotliCompressionHandler, - FileHandler as FileHandler, - FingersCrossedHandler as FingersCrossedHandler, - GMailHandler as GMailHandler, - GZIPCompressionHandler as GZIPCompressionHandler, - GroupHandler as GroupHandler, - Handler as Handler, - HashingHandlerMixin as HashingHandlerMixin, - LimitingHandlerMixin as LimitingHandlerMixin, - MailHandler as MailHandler, - MonitoringFileHandler as MonitoringFileHandler, - NTEventLogHandler as NTEventLogHandler, - NullHandler as NullHandler, - RotatingFileHandler as RotatingFileHandler, - StderrHandler as StderrHandler, - StreamHandler as StreamHandler, - StringFormatter as StringFormatter, - StringFormatterHandlerMixin as StringFormatterHandlerMixin, - SyslogHandler as SyslogHandler, - TestHandler as TestHandler, - TimedRotatingFileHandler as TimedRotatingFileHandler, - WrapperHandler as WrapperHandler, - create_syshandler as create_syshandler, -) -from . 
import compat as compat -from typing import Any - -trace: Any -debug: Any -info: Any -warn: Any -warning: Any -notice: Any -error: Any -exception: Any -catch_exceptions: Any -critical: Any -log: Any -default_handler: Any diff --git a/third-party-stubs/logbook/__version__.pyi b/third-party-stubs/logbook/__version__.pyi deleted file mode 100644 index e5b7a06e5ee..00000000000 --- a/third-party-stubs/logbook/__version__.pyi +++ /dev/null @@ -1,5 +0,0 @@ -# Stubs for logbook.__version__ (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -__version__: str diff --git a/third-party-stubs/logbook/_fallback.pyi b/third-party-stubs/logbook/_fallback.pyi deleted file mode 100644 index 0e2b32f6edf..00000000000 --- a/third-party-stubs/logbook/_fallback.pyi +++ /dev/null @@ -1,40 +0,0 @@ -# Stubs for logbook._fallback (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -from typing import Any - -def group_reflected_property(name: Any, default: Any, fallback: Any = ...): ... - -class _StackBound: - def __init__(self, obj: Any, push: Any, pop: Any) -> None: ... - def __enter__(self): ... - def __exit__(self, exc_type: Any, exc_value: Any, tb: Any) -> None: ... - -class StackedObject: - def push_greenlet(self) -> None: ... - def pop_greenlet(self) -> None: ... - def push_context(self) -> None: ... - def pop_context(self) -> None: ... - def push_thread(self) -> None: ... - def pop_thread(self) -> None: ... - def push_application(self) -> None: ... - def pop_application(self) -> None: ... - def __enter__(self): ... - def __exit__(self, exc_type: Any, exc_value: Any, tb: Any) -> None: ... - def greenletbound(self, _cls: Any = ...): ... - def contextbound(self, _cls: Any = ...): ... - def threadbound(self, _cls: Any = ...): ... - def applicationbound(self, _cls: Any = ...): ... - -class ContextStackManager: - def __init__(self) -> None: ... - def iter_context_objects(self): ... 
- def push_greenlet(self, obj: Any) -> None: ... - def pop_greenlet(self): ... - def push_context(self, obj: Any) -> None: ... - def pop_context(self): ... - def push_thread(self, obj: Any) -> None: ... - def pop_thread(self): ... - def push_application(self, obj: Any) -> None: ... - def pop_application(self): ... diff --git a/third-party-stubs/logbook/_termcolors.pyi b/third-party-stubs/logbook/_termcolors.pyi deleted file mode 100644 index 7284b777785..00000000000 --- a/third-party-stubs/logbook/_termcolors.pyi +++ /dev/null @@ -1,13 +0,0 @@ -# Stubs for logbook._termcolors (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -from typing import Any - -esc: str -codes: Any -dark_colors: Any -light_colors: Any -x: int - -def colorize(color_key: Any, text: Any): ... diff --git a/third-party-stubs/logbook/base.pyi b/third-party-stubs/logbook/base.pyi deleted file mode 100644 index ed769dd1698..00000000000 --- a/third-party-stubs/logbook/base.pyi +++ /dev/null @@ -1,184 +0,0 @@ -# Stubs for logbook.base (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -from logbook._fallback import StackedObject -from typing import Any, Optional - -def set_datetime_format(datetime_format: Any) -> None: ... - -CRITICAL: int -ERROR: int -WARNING: int -NOTICE: int -INFO: int -DEBUG: int -TRACE: int -NOTSET: int - -def level_name_property(): ... -def lookup_level(level: Any): ... -def get_level_name(level: Any): ... - -class _ExceptionCatcher: - logger: Any = ... - args: Any = ... - kwargs: Any = ... - def __init__(self, logger: Any, args: Any, kwargs: Any) -> None: ... - def __enter__(self): ... - def __exit__(self, exc_type: Any, exc_value: Any, tb: Any): ... - -class ContextObject(StackedObject): - stack_manager: Any = ... - def push_greenlet(self) -> None: ... - def pop_greenlet(self) -> None: ... - def push_context(self) -> None: ... - def pop_context(self) -> None: ... 
- def push_thread(self) -> None: ... - def pop_thread(self) -> None: ... - def push_application(self) -> None: ... - def pop_application(self) -> None: ... - -class NestedSetup(StackedObject): - objects: Any = ... - def __init__(self, objects: Optional[Any] = ...) -> None: ... - def push_application(self) -> None: ... - def pop_application(self) -> None: ... - def push_thread(self) -> None: ... - def pop_thread(self) -> None: ... - def push_greenlet(self) -> None: ... - def pop_greenlet(self) -> None: ... - def push_context(self) -> None: ... - def pop_context(self) -> None: ... - -class Processor(ContextObject): - stack_manager: Any = ... - callback: Any = ... - def __init__(self, callback: Optional[Any] = ...) -> None: ... - def process(self, record: Any) -> None: ... - -class _InheritedType: - def __reduce__(self): ... - -Inherit: Any - -class Flags(ContextObject): - stack_manager: Any = ... - def __init__(self, **flags: Any) -> None: ... - @staticmethod - def get_flag(flag: Any, default: Optional[Any] = ...): ... - -class LogRecord: - keep_open: bool = ... - time: Any = ... - heavy_initialized: bool = ... - late: bool = ... - information_pulled: bool = ... - channel: Any = ... - msg: Any = ... - args: Any = ... - kwargs: Any = ... - level: Any = ... - exc_info: Any = ... - extra: Any = ... - frame: Any = ... - frame_correction: Any = ... - process: int = ... - def __init__( - self, - channel: Any, - level: Any, - msg: Any, - args: Optional[Any] = ..., - kwargs: Optional[Any] = ..., - exc_info: Optional[Any] = ..., - extra: Optional[Any] = ..., - frame: Optional[Any] = ..., - dispatcher: Optional[Any] = ..., - frame_correction: int = ..., - ) -> None: ... - def heavy_init(self) -> None: ... - def pull_information(self) -> None: ... - def close(self) -> None: ... - def __reduce_ex__(self, protocol: Any): ... - def to_dict(self, json_safe: bool = ...): ... - @classmethod - def from_dict(cls, d: Any): ... - def update_from_dict(self, d: Any): ... 
- def message(self): ... - level_name: Any = ... - def calling_frame(self): ... - def func_name(self): ... - def module(self): ... - def filename(self): ... - def lineno(self): ... - def greenlet(self): ... - def thread(self): ... - @property - def thread_name(self) -> str: ... - def process_name(self): ... - @property - def formatted_exception(self) -> Optional[str]: ... - def exception_name(self): ... - @property - def exception_shortname(self): ... - def exception_message(self): ... - @property - def dispatcher(self): ... - -class LoggerMixin: - level_name: Any = ... - def trace(self, *args: Any, **kwargs: Any) -> None: ... - def debug(self, *args: Any, **kwargs: Any) -> None: ... - def info(self, *args: Any, **kwargs: Any) -> None: ... - def warn(self, *args: Any, **kwargs: Any) -> None: ... - def warning(self, *args: Any, **kwargs: Any): ... - def notice(self, *args: Any, **kwargs: Any) -> None: ... - def error(self, *args: Any, **kwargs: Any) -> None: ... - def exception(self, *args: Any, **kwargs: Any): ... - def critical(self, *args: Any, **kwargs: Any) -> None: ... - def log(self, level: Any, *args: Any, **kwargs: Any) -> None: ... - def catch_exceptions(self, *args: Any, **kwargs: Any): ... - disabled: bool = ... - def enable(self) -> None: ... - def disable(self) -> None: ... - -class RecordDispatcher: - suppress_dispatcher: bool = ... - name: Any = ... - handlers: Any = ... - group: Any = ... - level: Any = ... - def __init__(self, name: Optional[Any] = ..., level: Any = ...) -> None: ... - disabled: Any = ... - def handle(self, record: Any) -> None: ... - def make_record_and_handle( - self, - level: Any, - msg: Any, - args: Any, - kwargs: Any, - exc_info: Any, - extra: Any, - frame_correction: Any, - ) -> None: ... - def call_handlers(self, record: Any) -> None: ... - def process_record(self, record: Any) -> None: ... - -class Logger(RecordDispatcher, LoggerMixin): ... - -class LoggerGroup: - loggers: Any = ... - level: Any = ... - disabled: bool = ... 
- processor: Any = ... - def __init__( - self, loggers: Optional[Any] = ..., level: Any = ..., processor: Optional[Any] = ... - ) -> None: ... - def add_logger(self, logger: Any) -> None: ... - def remove_logger(self, logger: Any) -> None: ... - def process_record(self, record: Any) -> None: ... - def enable(self, force: bool = ...) -> None: ... - def disable(self, force: bool = ...) -> None: ... - -def dispatch_record(record: Any) -> None: ... diff --git a/third-party-stubs/logbook/compat.pyi b/third-party-stubs/logbook/compat.pyi deleted file mode 100644 index 75592bf31ed..00000000000 --- a/third-party-stubs/logbook/compat.pyi +++ /dev/null @@ -1,60 +0,0 @@ -# Stubs for logbook.compat (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -import logbook -import logging -from typing import Any, Optional - -def redirect_logging(set_root_logger_level: bool = ...) -> None: ... - -class redirected_logging: - old_handlers: Any = ... - old_level: Any = ... - set_root_logger_level: Any = ... - def __init__(self, set_root_logger_level: bool = ...) -> None: ... - def start(self) -> None: ... - def end( - self, etype: Optional[Any] = ..., evalue: Optional[Any] = ..., tb: Optional[Any] = ... - ) -> None: ... - __enter__: Any = ... - __exit__: Any = ... - -class LoggingCompatRecord(logbook.LogRecord): ... - -class RedirectLoggingHandler(logging.Handler): - def __init__(self) -> None: ... - def convert_level(self, level: Any): ... - def find_extra(self, old_record: Any): ... - def find_caller(self, old_record: Any): ... - def convert_time(self, timestamp: Any): ... - def convert_record(self, old_record: Any): ... - def emit(self, record: Any) -> None: ... - -class LoggingHandler(logbook.Handler): - logger: Any = ... - def __init__( - self, - logger: Optional[Any] = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def get_logger(self, record: Any): ... 
- def convert_level(self, level: Any): ... - def convert_time(self, dt: Any): ... - def convert_record(self, old_record: Any): ... - def emit(self, record: Any) -> None: ... - -def redirect_warnings() -> None: ... - -class redirected_warnings: - def __init__(self) -> None: ... - def message_to_unicode(self, message: Any): ... - def make_record(self, message: Any, exception: Any, filename: Any, lineno: Any): ... - def start(self) -> None: ... - def end( - self, etype: Optional[Any] = ..., evalue: Optional[Any] = ..., tb: Optional[Any] = ... - ) -> None: ... - __enter__: Any = ... - __exit__: Any = ... diff --git a/third-party-stubs/logbook/concurrency.pyi b/third-party-stubs/logbook/concurrency.pyi deleted file mode 100644 index 070fd741ad0..00000000000 --- a/third-party-stubs/logbook/concurrency.pyi +++ /dev/null @@ -1,51 +0,0 @@ -# Stubs for logbook.concurrency (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -from _thread import _local as thread_local, get_ident as thread_get_ident -from typing import Any, Optional - -has_gevent: bool -use_gevent: bool - -def enable_gevent() -> None: ... -def is_gevent_enabled(): ... - -ThreadLock: Any -ThreadRLock: Any -thread_get_ident: Any -thread_local: Any - -def thread_get_name(): ... - -class GreenletRLock: - def __init__(self) -> None: ... - def acquire(self, blocking: int = ...): ... - def release(self) -> None: ... - __enter__: Any = ... - def __exit__(self, t: Any, v: Any, tb: Any) -> None: ... - -greenlet_get_ident = thread_get_ident -greenlet_local = thread_local - -class GreenletRLock: - def acquire(self) -> None: ... - def release(self) -> None: ... - def __enter__(self) -> None: ... - def __exit__(self, t: Any, v: Any, tb: Any) -> None: ... - -def new_fine_grained_lock(): ... - -has_contextvars: bool -context_ident_counter: Any -context_ident: Any - -def context_get_ident(): ... -def is_context_enabled(): ... - -class ContextVar: - name: Any = ... - local: Any = ... 
- def __init__(self, name: Any) -> None: ... - def set(self, value: Any) -> None: ... - def get(self, default: Optional[Any] = ...): ... diff --git a/third-party-stubs/logbook/handlers.pyi b/third-party-stubs/logbook/handlers.pyi deleted file mode 100644 index 54131084690..00000000000 --- a/third-party-stubs/logbook/handlers.pyi +++ /dev/null @@ -1,412 +0,0 @@ -# Stubs for logbook.handlers (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -from logbook.base import ContextObject -from typing import Any, Optional - -DEFAULT_FORMAT_STRING: Any -SYSLOG_FORMAT_STRING: Any -NTLOG_FORMAT_STRING: Any -TEST_FORMAT_STRING: Any -MAIL_FORMAT_STRING: Any -MAIL_RELATED_FORMAT_STRING: Any -SYSLOG_PORT: int -REGTYPE: Any - -def create_syshandler(application_name: Any, level: Any = ...): ... - -class _HandlerType(type): - def __new__(cls, name: Any, bases: Any, d: Any): ... - -class Handler(ContextObject): - stack_manager: Any = ... - blackhole: bool = ... - level: Any = ... - formatter: Any = ... - filter: Any = ... - bubble: Any = ... - def __init__( - self, level: Any = ..., filter: Optional[Any] = ..., bubble: bool = ... - ) -> None: ... - level_name: Any = ... - def format(self, record: Any): ... - def should_handle(self, record: Any): ... - def handle(self, record: Any): ... - def emit(self, record: Any) -> None: ... - def emit_batch(self, records: Any, reason: Any) -> None: ... - def close(self) -> None: ... - def handle_error(self, record: Any, exc_info: Any) -> None: ... - -class NullHandler(Handler): - blackhole: bool = ... - def __init__(self, level: Any = ..., filter: Optional[Any] = ...) -> None: ... - -class WrapperHandler(Handler): - handler: Any = ... - def __init__(self, handler: Any) -> None: ... - def __getattr__(self, name: Any): ... - def __setattr__(self, name: Any, value: Any): ... - -class StringFormatter: - format_string: Any = ... - def __init__(self, format_string: Any) -> None: ... 
- def format_record(self, record: Any, handler: Any): ... - def format_exception(self, record: Any): ... - def __call__(self, record: Any, handler: Any): ... - -class StringFormatterHandlerMixin: - default_format_string: Any = ... - formatter_class: Any = ... - format_string: Any = ... - def __init__(self, format_string: Any) -> None: ... - -class HashingHandlerMixin: - def hash_record_raw(self, record: Any): ... - def hash_record(self, record: Any): ... - -class LimitingHandlerMixin(HashingHandlerMixin): - record_limit: Any = ... - record_delta: Any = ... - def __init__(self, record_limit: Any, record_delta: Any) -> None: ... - def check_delivery(self, record: Any): ... - -class StreamHandler(Handler, StringFormatterHandlerMixin): - encoding: Any = ... - lock: Any = ... - stream: Any = ... - def __init__( - self, - stream: Any, - level: Any = ..., - format_string: Optional[Any] = ..., - encoding: Optional[Any] = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def __enter__(self): ... - def __exit__(self, exc_type: Any, exc_value: Any, tb: Any): ... - def ensure_stream_is_open(self) -> None: ... - def close(self) -> None: ... - def flush(self) -> None: ... - def encode(self, msg: Any): ... - def write(self, item: Any) -> None: ... - def emit(self, record: Any) -> None: ... - def should_flush(self): ... - -class FileHandler(StreamHandler): - stream: Any = ... - def __init__( - self, - filename: Any, - mode: str = ..., - encoding: Optional[Any] = ..., - level: Any = ..., - format_string: Optional[Any] = ..., - delay: bool = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def write(self, item: Any) -> None: ... - def close(self) -> None: ... - def encode(self, record: Any): ... - def ensure_stream_is_open(self) -> None: ... 
- -class GZIPCompressionHandler(FileHandler): - def __init__( - self, - filename: Any, - encoding: Optional[Any] = ..., - level: Any = ..., - format_string: Optional[Any] = ..., - delay: bool = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - compression_quality: int = ..., - ) -> None: ... - def write(self, item: Any) -> None: ... - def should_flush(self): ... - -class BrotliCompressionHandler(FileHandler): - def __init__( - self, - filename: Any, - encoding: Optional[Any] = ..., - level: Any = ..., - format_string: Optional[Any] = ..., - delay: bool = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - compression_window_size: Any = ..., - compression_quality: int = ..., - ) -> None: ... - def write(self, item: Any) -> None: ... - def should_flush(self): ... - def flush(self) -> None: ... - def close(self) -> None: ... - -class MonitoringFileHandler(FileHandler): - def __init__( - self, - filename: Any, - mode: str = ..., - encoding: str = ..., - level: Any = ..., - format_string: Optional[Any] = ..., - delay: bool = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - stream: Any = ... - def emit(self, record: Any) -> None: ... - -class StderrHandler(StreamHandler): - def __init__( - self, - level: Any = ..., - format_string: Optional[Any] = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - @property - def stream(self): ... - -class RotatingFileHandler(FileHandler): - max_size: Any = ... - backup_count: Any = ... - def __init__( - self, - filename: Any, - mode: str = ..., - encoding: str = ..., - level: Any = ..., - format_string: Optional[Any] = ..., - delay: bool = ..., - max_size: Any = ..., - backup_count: int = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def should_rollover(self, record: Any, bytes: Any): ... - def perform_rollover(self) -> None: ... - def emit(self, record: Any) -> None: ... 
- -class TimedRotatingFileHandler(FileHandler): - date_format: Any = ... - backup_count: Any = ... - rollover_format: Any = ... - original_filename: Any = ... - timed_filename_for_current: Any = ... - def __init__( - self, - filename: Any, - mode: str = ..., - encoding: str = ..., - level: Any = ..., - format_string: Optional[Any] = ..., - date_format: str = ..., - backup_count: int = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - timed_filename_for_current: bool = ..., - rollover_format: str = ..., - ) -> None: ... - def generate_timed_filename(self, timestamp: Any): ... - def files_to_delete(self): ... - def perform_rollover(self, new_timestamp: Any) -> None: ... - def emit(self, record: Any) -> None: ... - -class TestHandler(Handler, StringFormatterHandlerMixin): - default_format_string: Any = ... - records: Any = ... - def __init__( - self, - level: Any = ..., - format_string: Optional[Any] = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - force_heavy_init: bool = ..., - ) -> None: ... - def close(self) -> None: ... - def emit(self, record: Any) -> None: ... - @property - def formatted_records(self): ... - @property - def has_criticals(self): ... - @property - def has_errors(self): ... - @property - def has_warnings(self): ... - @property - def has_notices(self): ... - @property - def has_infos(self): ... - @property - def has_debugs(self): ... - @property - def has_traces(self): ... - def has_critical(self, *args: Any, **kwargs: Any): ... - def has_error(self, *args: Any, **kwargs: Any): ... - def has_warning(self, *args: Any, **kwargs: Any): ... - def has_notice(self, *args: Any, **kwargs: Any): ... - def has_info(self, *args: Any, **kwargs: Any): ... - def has_debug(self, *args: Any, **kwargs: Any): ... - def has_trace(self, *args: Any, **kwargs: Any): ... - -class MailHandler(Handler, StringFormatterHandlerMixin, LimitingHandlerMixin): - default_format_string: Any = ... - default_related_format_string: Any = ... 
- default_subject: Any = ... - max_record_cache: int = ... - record_cache_prune: float = ... - from_addr: Any = ... - recipients: Any = ... - subject: Any = ... - server_addr: Any = ... - credentials: Any = ... - secure: Any = ... - related_format_string: Any = ... - starttls: Any = ... - def __init__( - self, - from_addr: Any, - recipients: Any, - subject: Optional[Any] = ..., - server_addr: Optional[Any] = ..., - credentials: Optional[Any] = ..., - secure: Optional[Any] = ..., - record_limit: Optional[Any] = ..., - record_delta: Optional[Any] = ..., - level: Any = ..., - format_string: Optional[Any] = ..., - related_format_string: Optional[Any] = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - starttls: bool = ..., - ) -> None: ... - def get_recipients(self, record: Any): ... - def message_from_record(self, record: Any, suppressed: Any): ... - def format_related_record(self, record: Any): ... - def generate_mail(self, record: Any, suppressed: int = ...): ... - def collapse_mails(self, mail: Any, related: Any, reason: Any): ... - def get_connection(self): ... - def close_connection(self, con: Any) -> None: ... - def deliver(self, msg: Any, recipients: Any) -> None: ... - def emit(self, record: Any) -> None: ... - def emit_batch(self, records: Any, reason: Any) -> None: ... - -class GMailHandler(MailHandler): - def __init__(self, account_id: Any, password: Any, recipients: Any, **kw: Any) -> None: ... - -class SyslogHandler(Handler, StringFormatterHandlerMixin): - default_format_string: Any = ... - LOG_EMERG: int = ... - LOG_ALERT: int = ... - LOG_CRIT: int = ... - LOG_ERR: int = ... - LOG_WARNING: int = ... - LOG_NOTICE: int = ... - LOG_INFO: int = ... - LOG_DEBUG: int = ... - LOG_KERN: int = ... - LOG_USER: int = ... - LOG_MAIL: int = ... - LOG_DAEMON: int = ... - LOG_AUTH: int = ... - LOG_SYSLOG: int = ... - LOG_LPR: int = ... - LOG_NEWS: int = ... - LOG_UUCP: int = ... - LOG_CRON: int = ... - LOG_AUTHPRIV: int = ... - LOG_FTP: int = ... 
- LOG_LOCAL0: int = ... - LOG_LOCAL1: int = ... - LOG_LOCAL2: int = ... - LOG_LOCAL3: int = ... - LOG_LOCAL4: int = ... - LOG_LOCAL5: int = ... - LOG_LOCAL6: int = ... - LOG_LOCAL7: int = ... - facility_names: Any = ... - level_priority_map: Any = ... - application_name: Any = ... - remote_address: Any = ... - facility: Any = ... - socktype: Any = ... - enveloper: Any = ... - record_delimiter: Any = ... - connection_exception: Any = ... - def __init__( - self, - application_name: Optional[Any] = ..., - address: Optional[Any] = ..., - facility: str = ..., - socktype: Any = ..., - level: Any = ..., - format_string: Optional[Any] = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - record_delimiter: Optional[Any] = ..., - ) -> None: ... - def encode_priority(self, record: Any): ... - def wrap_segments(self, record: Any, before: Any): ... - def unix_envelope(self, record: Any): ... - format_string: Any = ... - def net_envelope(self, record: Any): ... - def emit(self, record: Any) -> None: ... - def send_to_socket(self, data: Any) -> None: ... - def close(self) -> None: ... - -class NTEventLogHandler(Handler, StringFormatterHandlerMixin): - dllname: Any = ... - default_format_string: Any = ... - application_name: Any = ... - log_type: Any = ... - def __init__( - self, - application_name: Any, - log_type: str = ..., - level: Any = ..., - format_string: Optional[Any] = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def unregister_logger(self) -> None: ... - def get_event_type(self, record: Any): ... - def get_event_category(self, record: Any): ... - def get_message_id(self, record: Any): ... - def emit(self, record: Any) -> None: ... - -class FingersCrossedHandler(Handler): - batch_emit_reason: str = ... - lock: Any = ... - buffered_records: Any = ... - buffer_size: Any = ... 
- def __init__( - self, - handler: Any, - action_level: Any = ..., - buffer_size: int = ..., - pull_information: bool = ..., - reset: bool = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def close(self) -> None: ... - def enqueue(self, record: Any): ... - def rollover(self, record: Any) -> None: ... - @property - def triggered(self): ... - def emit(self, record: Any) -> None: ... - -class GroupHandler(WrapperHandler): - pull_information: Any = ... - buffered_records: Any = ... - def __init__(self, handler: Any, pull_information: bool = ...) -> None: ... - def rollover(self) -> None: ... - def pop_application(self) -> None: ... - def pop_thread(self) -> None: ... - def pop_context(self) -> None: ... - def pop_greenlet(self) -> None: ... - def emit(self, record: Any) -> None: ... diff --git a/third-party-stubs/logbook/helpers.pyi b/third-party-stubs/logbook/helpers.pyi deleted file mode 100644 index 02f13f15ea0..00000000000 --- a/third-party-stubs/logbook/helpers.pyi +++ /dev/null @@ -1,42 +0,0 @@ -# Stubs for logbook.helpers (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -import os -from typing import Any, Optional - -PY2: Any -iteritems: Any -xrange: Any -xrange = range - -def u(s: Any): ... - -u: Any -integer_types: Any -string_types: Any - -def reraise(tp: Any, value: Any, tb: Optional[Any] = ...) -> None: ... -def b(x: Any): ... - -can_rename_open_file: bool - -def rename(src: Any, dst: Any) -> None: ... - -rename = os.rename - -def to_safe_json(data: Any): ... -def format_iso8601(d: Optional[Any] = ...): ... -def parse_iso8601(value: Any): ... -def get_application_name(): ... - -class cached_property: - __name__: Any = ... - __module__: Any = ... - __doc__: Any = ... - func: Any = ... - def __init__(self, func: Any, name: Optional[Any] = ..., doc: Optional[Any] = ...) -> None: ... - def __get__(self, obj: Any, type: Optional[Any] = ...): ... 
- -def get_iterator_next_method(it: Any): ... -def is_unicode(x: Any): ... diff --git a/third-party-stubs/logbook/more.pyi b/third-party-stubs/logbook/more.pyi deleted file mode 100644 index f0ff7af11db..00000000000 --- a/third-party-stubs/logbook/more.pyi +++ /dev/null @@ -1,148 +0,0 @@ -# Stubs for logbook.more (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -from logbook.base import RecordDispatcher -from logbook.handlers import ( - FingersCrossedHandler as FingersCrossedHandlerBase, - Handler, - StderrHandler, - StringFormatter, - StringFormatterHandlerMixin, -) -from logbook.ticketing import BackendBase -from typing import Any, Optional - -TWITTER_FORMAT_STRING: Any -TWITTER_ACCESS_TOKEN_URL: str -NEW_TWEET_URL: str - -class CouchDBBackend(BackendBase): - database: Any = ... - def setup_backend(self) -> None: ... - def record_ticket(self, record: Any, data: Any, hash: Any, app_id: Any) -> None: ... - -class TwitterFormatter(StringFormatter): - max_length: int = ... - def format_exception(self, record: Any): ... - def __call__(self, record: Any, handler: Any): ... - -class TaggingLogger(RecordDispatcher): - def __init__(self, name: Optional[Any] = ..., tags: Optional[Any] = ...) -> None: ... - def log(self, tags: Any, msg: Any, *args: Any, **kwargs: Any): ... - -class TaggingHandler(Handler): - def __init__(self, handlers: Any, filter: Optional[Any] = ..., bubble: bool = ...) -> None: ... - def emit(self, record: Any) -> None: ... - -class TwitterHandler(Handler, StringFormatterHandlerMixin): - default_format_string: Any = ... - formatter_class: Any = ... - consumer_key: Any = ... - consumer_secret: Any = ... - username: Any = ... - password: Any = ... - def __init__( - self, - consumer_key: Any, - consumer_secret: Any, - username: Any, - password: Any, - level: Any = ..., - format_string: Optional[Any] = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def get_oauth_token(self): ... 
- def make_client(self): ... - def tweet(self, status: Any): ... - def emit(self, record: Any) -> None: ... - -class SlackHandler(Handler, StringFormatterHandlerMixin): - api_token: Any = ... - channel: Any = ... - slack: Any = ... - def __init__( - self, - api_token: Any, - channel: Any, - level: Any = ..., - format_string: Optional[Any] = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def emit(self, record: Any) -> None: ... - -class JinjaFormatter: - template: Any = ... - def __init__(self, template: Any) -> None: ... - def __call__(self, record: Any, handler: Any): ... - -class ExternalApplicationHandler(Handler): - encoding: Any = ... - def __init__( - self, - arguments: Any, - stdin_format: Optional[Any] = ..., - encoding: str = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def emit(self, record: Any) -> None: ... - -class ColorizingStreamHandlerMixin: - def force_color(self) -> None: ... - def forbid_color(self) -> None: ... - def should_colorize(self, record: Any): ... - def get_color(self, record: Any): ... - def format(self, record: Any): ... - -class ColorizedStderrHandler(ColorizingStreamHandlerMixin, StderrHandler): - def __init__(self, *args: Any, **kwargs: Any) -> None: ... - -class FingersCrossedHandler(FingersCrossedHandlerBase): - def __init__(self, *args: Any, **kwargs: Any) -> None: ... - -class ExceptionHandler(Handler, StringFormatterHandlerMixin): - exc_type: Any = ... - def __init__( - self, - exc_type: Any, - level: Any = ..., - format_string: Optional[Any] = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def handle(self, record: Any): ... - -class DedupHandler(Handler): - def __init__(self, format_string: str = ..., *args: Any, **kwargs: Any) -> None: ... - def clear(self) -> None: ... - def pop_application(self) -> None: ... - def pop_thread(self) -> None: ... - def pop_context(self) -> None: ... 
- def pop_greenlet(self) -> None: ... - def handle(self, record: Any): ... - def flush(self) -> None: ... - -class RiemannHandler(Handler): - host: Any = ... - port: Any = ... - ttl: Any = ... - queue: Any = ... - flush_threshold: Any = ... - transport: Any = ... - def __init__( - self, - host: Any, - port: Any, - message_type: str = ..., - ttl: int = ..., - flush_threshold: int = ..., - bubble: bool = ..., - filter: Optional[Any] = ..., - level: Any = ..., - ) -> None: ... - def record_to_event(self, record: Any): ... - def emit(self, record: Any) -> None: ... diff --git a/third-party-stubs/logbook/notifiers.pyi b/third-party-stubs/logbook/notifiers.pyi deleted file mode 100644 index 9bd1fe73bb8..00000000000 --- a/third-party-stubs/logbook/notifiers.pyi +++ /dev/null @@ -1,123 +0,0 @@ -# Stubs for logbook.notifiers (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -from logbook.handlers import Handler, LimitingHandlerMixin -from typing import Any, Optional - -def create_notification_handler( - application_name: Optional[Any] = ..., level: Any = ..., icon: Optional[Any] = ... -): ... - -class NotificationBaseHandler(Handler, LimitingHandlerMixin): - application_name: Any = ... - def __init__( - self, - application_name: Optional[Any] = ..., - record_limit: Optional[Any] = ..., - record_delta: Optional[Any] = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def make_title(self, record: Any): ... - def make_text(self, record: Any): ... - -class GrowlHandler(NotificationBaseHandler): - def __init__( - self, - application_name: Optional[Any] = ..., - icon: Optional[Any] = ..., - host: Optional[Any] = ..., - password: Optional[Any] = ..., - record_limit: Optional[Any] = ..., - record_delta: Optional[Any] = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def is_sticky(self, record: Any): ... - def get_priority(self, record: Any): ... 
- def emit(self, record: Any) -> None: ... - -class LibNotifyHandler(NotificationBaseHandler): - icon: Any = ... - def __init__( - self, - application_name: Optional[Any] = ..., - icon: Optional[Any] = ..., - no_init: bool = ..., - record_limit: Optional[Any] = ..., - record_delta: Optional[Any] = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def set_notifier_icon(self, notifier: Any, icon: Any) -> None: ... - def get_expires(self, record: Any): ... - def get_urgency(self, record: Any): ... - def emit(self, record: Any) -> None: ... - -class BoxcarHandler(NotificationBaseHandler): - api_url: str = ... - email: Any = ... - password: Any = ... - def __init__( - self, - email: Any, - password: Any, - record_limit: Optional[Any] = ..., - record_delta: Optional[Any] = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def get_screen_name(self, record: Any): ... - def emit(self, record: Any) -> None: ... - -class NotifoHandler(NotificationBaseHandler): - application_name: Any = ... - username: Any = ... - secret: Any = ... - hide_level: Any = ... - def __init__( - self, - application_name: Optional[Any] = ..., - username: Optional[Any] = ..., - secret: Optional[Any] = ..., - record_limit: Optional[Any] = ..., - record_delta: Optional[Any] = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - hide_level: bool = ..., - ) -> None: ... - def emit(self, record: Any) -> None: ... - -class PushoverHandler(NotificationBaseHandler): - application_name: Any = ... - apikey: Any = ... - userkey: Any = ... - device: Any = ... - priority: Any = ... - sound: Any = ... - max_title_len: Any = ... - max_message_len: Any = ... - title: Any = ... 
- def __init__( - self, - application_name: Optional[Any] = ..., - apikey: Optional[Any] = ..., - userkey: Optional[Any] = ..., - device: Optional[Any] = ..., - priority: int = ..., - sound: Optional[Any] = ..., - record_limit: Optional[Any] = ..., - record_delta: Optional[Any] = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - max_title_len: int = ..., - max_message_len: int = ..., - ) -> None: ... - def emit(self, record: Any) -> None: ... diff --git a/third-party-stubs/logbook/queues.pyi b/third-party-stubs/logbook/queues.pyi deleted file mode 100644 index 96e757b2ee6..00000000000 --- a/third-party-stubs/logbook/queues.pyi +++ /dev/null @@ -1,154 +0,0 @@ -# Stubs for logbook.queues (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -from logbook.handlers import Handler, WrapperHandler -from typing import Any, Optional - -class RedisHandler(Handler): - redis: Any = ... - key: Any = ... - extra_fields: Any = ... - flush_threshold: Any = ... - queue: Any = ... - lock: Any = ... - push_method: Any = ... - def __init__( - self, - host: str = ..., - port: int = ..., - key: str = ..., - extra_fields: Optional[Any] = ..., - flush_threshold: int = ..., - flush_time: int = ..., - level: Any = ..., - filter: Optional[Any] = ..., - password: bool = ..., - bubble: bool = ..., - context: Optional[Any] = ..., - push_method: str = ..., - ) -> None: ... - def disable_buffering(self) -> None: ... - def emit(self, record: Any) -> None: ... - def close(self) -> None: ... - -class MessageQueueHandler(Handler): - queue: Any = ... - def __init__( - self, - uri: Optional[Any] = ..., - queue: str = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def export_record(self, record: Any): ... - def emit(self, record: Any) -> None: ... - def close(self) -> None: ... - -RabbitMQHandler = MessageQueueHandler - -class ZeroMQHandler(Handler): - context: Any = ... 
- socket: Any = ... - def __init__( - self, - uri: Optional[Any] = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - context: Optional[Any] = ..., - multi: bool = ..., - ) -> None: ... - def export_record(self, record: Any): ... - def emit(self, record: Any) -> None: ... - def close(self, linger: int = ...) -> None: ... - def __del__(self) -> None: ... - -class ThreadController: - setup: Any = ... - subscriber: Any = ... - running: bool = ... - def __init__(self, subscriber: Any, setup: Optional[Any] = ...) -> None: ... - def start(self) -> None: ... - def stop(self) -> None: ... - -class SubscriberBase: - def recv(self, timeout: Optional[Any] = ...) -> Any: ... - def dispatch_once(self, timeout: Optional[Any] = ...): ... - def dispatch_forever(self) -> None: ... - def dispatch_in_background(self, setup: Optional[Any] = ...): ... - -class MessageQueueSubscriber(SubscriberBase): - queue: Any = ... - def __init__(self, uri: Optional[Any] = ..., queue: str = ...) -> None: ... - def __del__(self) -> None: ... - def close(self) -> None: ... - def recv(self, timeout: Optional[Any] = ...): ... - -RabbitMQSubscriber = MessageQueueSubscriber - -class ZeroMQSubscriber(SubscriberBase): - context: Any = ... - socket: Any = ... - def __init__( - self, uri: Optional[Any] = ..., context: Optional[Any] = ..., multi: bool = ... - ) -> None: ... - def __del__(self) -> None: ... - def close(self) -> None: ... - def recv(self, timeout: Optional[Any] = ...): ... - -class MultiProcessingHandler(Handler): - queue: Any = ... - def __init__( - self, queue: Any, level: Any = ..., filter: Optional[Any] = ..., bubble: bool = ... - ) -> None: ... - def emit(self, record: Any) -> None: ... - -class MultiProcessingSubscriber(SubscriberBase): - queue: Any = ... - def __init__(self, queue: Optional[Any] = ...) -> None: ... - def recv(self, timeout: Optional[Any] = ...): ... - -class ExecnetChannelHandler(Handler): - channel: Any = ... 
- def __init__( - self, channel: Any, level: Any = ..., filter: Optional[Any] = ..., bubble: bool = ... - ) -> None: ... - def emit(self, record: Any) -> None: ... - -class ExecnetChannelSubscriber(SubscriberBase): - channel: Any = ... - def __init__(self, channel: Any) -> None: ... - def recv(self, timeout: Optional[Any] = ...): ... - -class TWHThreadController: - class Command: - stop: Any = ... - emit: Any = ... - emit_batch: Any = ... - wrapper_handler: Any = ... - running: bool = ... - def __init__(self, wrapper_handler: Any) -> None: ... - def start(self) -> None: ... - def stop(self) -> None: ... - -class ThreadedWrapperHandler(WrapperHandler): - queue: Any = ... - controller: Any = ... - def __init__(self, handler: Any, maxsize: int = ...) -> None: ... - def close(self) -> None: ... - def emit(self, record: Any) -> None: ... - def emit_batch(self, records: Any, reason: Any) -> None: ... - -class GroupMember(ThreadController): - queue: Any = ... - def __init__(self, subscriber: Any, queue: Any) -> None: ... - -class SubscriberGroup(SubscriberBase): - members: Any = ... - queue: Any = ... - def __init__(self, subscribers: Optional[Any] = ..., queue_limit: int = ...) -> None: ... - def add(self, subscriber: Any) -> None: ... - def recv(self, timeout: Optional[Any] = ...): ... - def stop(self) -> None: ... diff --git a/third-party-stubs/logbook/ticketing.pyi b/third-party-stubs/logbook/ticketing.pyi deleted file mode 100644 index 4435a206d26..00000000000 --- a/third-party-stubs/logbook/ticketing.pyi +++ /dev/null @@ -1,110 +0,0 @@ -# Stubs for logbook.ticketing (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -from logbook.base import LogRecord -from logbook.handlers import Handler, HashingHandlerMixin -from typing import Any, Optional - -class Ticket: - level_name: Any = ... - db: Any = ... - def __init__(self, db: Any, row: Any) -> None: ... - def last_occurrence(self): ... 
- def get_occurrences(self, order_by: str = ..., limit: int = ..., offset: int = ...): ... - solved: bool = ... - def solve(self) -> None: ... - def delete(self) -> None: ... - __hash__: Any = ... - def __eq__(self, other: Any): ... - def __ne__(self, other: Any): ... - -class Occurrence(LogRecord): - db: Any = ... - time: Any = ... - ticket_id: Any = ... - occurrence_id: Any = ... - def __init__(self, db: Any, row: Any) -> None: ... - -class BackendBase: - options: Any = ... - def __init__(self, **options: Any) -> None: ... - def setup_backend(self) -> None: ... - def record_ticket(self, record: Any, data: Any, hash: Any, app_id: Any) -> None: ... - def count_tickets(self) -> None: ... - def get_tickets(self, order_by: str = ..., limit: int = ..., offset: int = ...) -> None: ... - def solve_ticket(self, ticket_id: Any) -> None: ... - def delete_ticket(self, ticket_id: Any) -> None: ... - def get_ticket(self, ticket_id: Any) -> None: ... - def get_occurrences( - self, ticket: Any, order_by: str = ..., limit: int = ..., offset: int = ... - ) -> None: ... - -class SQLAlchemyBackend(BackendBase): - engine: Any = ... - session: Any = ... - table_prefix: Any = ... - metadata: Any = ... - def setup_backend(self) -> None: ... - tickets: Any = ... - occurrences: Any = ... - def create_tables(self): ... - def record_ticket(self, record: Any, data: Any, hash: Any, app_id: Any) -> None: ... - def count_tickets(self): ... - def get_tickets(self, order_by: str = ..., limit: int = ..., offset: int = ...): ... - def solve_ticket(self, ticket_id: Any) -> None: ... - def delete_ticket(self, ticket_id: Any) -> None: ... - def get_ticket(self, ticket_id: Any): ... - def get_occurrences( - self, ticket: Any, order_by: str = ..., limit: int = ..., offset: int = ... - ): ... - -class MongoDBBackend(BackendBase): - class _FixedTicketClass(Ticket): - @property - def ticket_id(self): ... - - class _FixedOccurrenceClass(Occurrence): - db: Any = ... - time: Any = ... - ticket_id: Any = ... 
- occurrence_id: Any = ... - def __init__(self, db: Any, row: Any) -> None: ... - database: Any = ... - def setup_backend(self) -> None: ... - def record_ticket(self, record: Any, data: Any, hash: Any, app_id: Any) -> None: ... - def count_tickets(self): ... - def get_tickets(self, order_by: str = ..., limit: int = ..., offset: int = ...): ... - def solve_ticket(self, ticket_id: Any) -> None: ... - def delete_ticket(self, ticket_id: Any) -> None: ... - def get_ticket(self, ticket_id: Any): ... - def get_occurrences( - self, ticket: Any, order_by: str = ..., limit: int = ..., offset: int = ... - ): ... - -class TicketingBaseHandler(Handler, HashingHandlerMixin): - hash_salt: Any = ... - def __init__( - self, hash_salt: Any, level: Any = ..., filter: Optional[Any] = ..., bubble: bool = ... - ) -> None: ... - def hash_record_raw(self, record: Any): ... - -class TicketingHandler(TicketingBaseHandler): - default_backend: Any = ... - app_id: Any = ... - def __init__( - self, - uri: Any, - app_id: str = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - hash_salt: Optional[Any] = ..., - backend: Optional[Any] = ..., - **db_options: Any, - ) -> None: ... - db: Any = ... - def set_backend(self, cls: Any, **options: Any) -> None: ... - def process_record(self, record: Any, hash: Any): ... - def record_ticket(self, record: Any, data: Any, hash: Any) -> None: ... - def emit(self, record: Any) -> None: ... diff --git a/third-party-stubs/logbook/utils.pyi b/third-party-stubs/logbook/utils.pyi deleted file mode 100644 index 27f9bb5d818..00000000000 --- a/third-party-stubs/logbook/utils.pyi +++ /dev/null @@ -1,39 +0,0 @@ -# Stubs for logbook.utils (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -import threading -from .base import DEBUG, Logger -from .helpers import string_types -from typing import Any, Optional - -class _SlowContextNotifier: - timer: Any = ... 
- def __init__(self, threshold: Any, func: Any) -> None: ... - def __enter__(self): ... - def __exit__(self, *_: Any) -> None: ... - -def logged_if_slow(*args: Any, **kwargs: Any): ... - -class _Local(threading.local): - enabled: bool = ... - -def suppressed_deprecations() -> None: ... -def forget_deprecation_locations() -> None: ... -def log_deprecation_message(message: Any, frame_correction: int = ...) -> None: ... - -class _DeprecatedFunction: - def __init__( - self, func: Any, message: Any, obj: Optional[Any] = ..., objtype: Optional[Any] = ... - ) -> None: ... - def __call__(self, *args: Any, **kwargs: Any): ... - def __get__(self, obj: Any, objtype: Any): ... - def bound_to(self, obj: Any, objtype: Any): ... - @property - def __name__(self): ... - @property - def __doc__(self): ... - @__doc__.setter - def __doc__(self, doc: Any) -> None: ... - -def deprecated(func: Optional[Any] = ..., message: Optional[Any] = ...): ... diff --git a/third-party-stubs/mashumaro/jsonschema/builder.pyi b/third-party-stubs/mashumaro/jsonschema/builder.pyi index 98bbc860298..8f973240a85 100644 --- a/third-party-stubs/mashumaro/jsonschema/builder.pyi +++ b/third-party-stubs/mashumaro/jsonschema/builder.pyi @@ -16,7 +16,7 @@ def build_json_schema( class JSONSchemaDefinitions(DataClassJSONMixin): definitions: Dict[str, JSONSchema] - def __post_serialize__(self, d: Dict[Any, Any]) -> List[Dict[str, Any]]: ... # type: ignore + def __post_serialize__(self, d: Dict[Any, Any], context: Optional[Dict]) -> List[Dict[str, Any]]: ... # type: ignore def __init__(self, definitions) -> None: ... 
class JSONSchemaBuilder: diff --git a/third-party-stubs/mashumaro/jsonschema/models.pyi b/third-party-stubs/mashumaro/jsonschema/models.pyi index b67db67b20b..6022d3d129f 100644 --- a/third-party-stubs/mashumaro/jsonschema/models.pyi +++ b/third-party-stubs/mashumaro/jsonschema/models.pyi @@ -106,8 +106,8 @@ class JSONSchema(DataClassJSONMixin): serialize_by_alias: bool aliases: Incomplete serialization_strategy: Incomplete - def __pre_serialize__(self) -> JSONSchema: ... - def __post_serialize__(self, d: Dict[Any, Any]) -> Dict[Any, Any]: ... + def __pre_serialize__(self, context: Optional[Dict]) -> JSONSchema: ... + def __post_serialize__(self, d: Dict[Any, Any], context: Optional[Dict]) -> Dict[Any, Any]: ... def __init__( self, schema, diff --git a/third-party-stubs/mashumaro/mixins/dict.pyi b/third-party-stubs/mashumaro/mixins/dict.pyi index 877283960a9..c6ec9accad1 100644 --- a/third-party-stubs/mashumaro/mixins/dict.pyi +++ b/third-party-stubs/mashumaro/mixins/dict.pyi @@ -1,4 +1,4 @@ -from typing import Any, Dict, Mapping, Type, TypeVar +from typing import Any, Dict, Mapping, Type, TypeVar, Optional T = TypeVar("T", bound="DataClassDictMixin") @@ -11,5 +11,5 @@ class DataClassDictMixin: def __pre_deserialize__(cls: Type[T], d: Dict[Any, Any]) -> Dict[Any, Any]: ... @classmethod def __post_deserialize__(cls: Type[T], obj: T) -> T: ... - def __pre_serialize__(self: T) -> T: ... - def __post_serialize__(self, d: Dict[Any, Any]) -> Dict[Any, Any]: ... + def __pre_serialize__(self: T, context: Optional[Dict]) -> T: ... + def __post_serialize__(self, d: Dict[Any, Any], context: Optional[Dict]) -> Dict[Any, Any]: ...