mirror of
https://github.com/dbt-labs/dbt-core
synced 2025-12-18 19:41:40 +00:00
Compare commits
166 Commits
remove-ssl
...
v1.7.5
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5f5ddd27ac | ||
|
|
6e331833b0 | ||
|
|
d338b3e065 | ||
|
|
9f0bcf5666 | ||
|
|
d65179b24e | ||
|
|
0ef8638ab4 | ||
|
|
fc2d16fd34 | ||
|
|
de35686532 | ||
|
|
cf3714d2c1 | ||
|
|
09f5bb3dcf | ||
|
|
f94bf2bba4 | ||
|
|
ae75cc3a62 | ||
|
|
1e45a751a5 | ||
|
|
b5885da54e | ||
|
|
73ebe53273 | ||
|
|
2a9c3689a4 | ||
|
|
67d8ce398f | ||
|
|
7eb6cdbbfb | ||
|
|
6ba3dc211c | ||
|
|
ce86238389 | ||
|
|
7d60d6361e | ||
|
|
bebd6ca0e1 | ||
|
|
66cb5a0e7c | ||
|
|
2401600e57 | ||
|
|
a8fcf77831 | ||
|
|
0995825793 | ||
|
|
eeaaec4de9 | ||
|
|
1cbab8079a | ||
|
|
333d793cf0 | ||
|
|
b6f0eac2cd | ||
|
|
5e13698b2b | ||
|
|
12c1cbbc64 | ||
|
|
ced70d55c3 | ||
|
|
1baebb423c | ||
|
|
462df8395e | ||
|
|
35f214d9db | ||
|
|
af0cbcb6a5 | ||
|
|
2e35426d11 | ||
|
|
bf10a29f06 | ||
|
|
a7e2d9bc40 | ||
|
|
a3777496b5 | ||
|
|
edf6aedc51 | ||
|
|
53845d0277 | ||
|
|
3d27483658 | ||
|
|
4f9bd0cb38 | ||
|
|
3f7f7de179 | ||
|
|
6461f5aacf | ||
|
|
339957b42c | ||
|
|
4391dc1a63 | ||
|
|
964e0e4e8a | ||
|
|
549dbf3390 | ||
|
|
70b2e15a25 | ||
|
|
bb249d612c | ||
|
|
17773bdb94 | ||
|
|
f30293359c | ||
|
|
0c85e6149f | ||
|
|
ec57d7af94 | ||
|
|
df791f729c | ||
|
|
c6ff3abecd | ||
|
|
eac13e3bd3 | ||
|
|
46ee3f3d9c | ||
|
|
5e1f0c5fbc | ||
|
|
c4f09b160a | ||
|
|
48c97e86dd | ||
|
|
416bc845ad | ||
|
|
408a78985a | ||
|
|
0c965c8115 | ||
|
|
f65e4b6940 | ||
|
|
a2d4424f92 | ||
|
|
997f839cd6 | ||
|
|
556fad50df | ||
|
|
bb4214b5c2 | ||
|
|
f17c1f3fe7 | ||
|
|
d4fe9a8ad4 | ||
|
|
2910aa29e4 | ||
|
|
89cc073ea8 | ||
|
|
aa86fdfe71 | ||
|
|
48e9ced781 | ||
|
|
7b02bd1f02 | ||
|
|
417fc2a735 | ||
|
|
317128f790 | ||
|
|
e3dfb09b10 | ||
|
|
d912654110 | ||
|
|
34ab4cf9be | ||
|
|
d597b80486 | ||
|
|
3f5ebe81b9 | ||
|
|
f52bd9287b | ||
|
|
f5baeeea1c | ||
|
|
3cc7044fb3 | ||
|
|
26c7675c28 | ||
|
|
8aaed0e29f | ||
|
|
5182e3c40c | ||
|
|
1e252c7664 | ||
|
|
05ef3b6e44 | ||
|
|
ad04012b63 | ||
|
|
c93cba4603 | ||
|
|
971669016f | ||
|
|
6c6f245914 | ||
|
|
b39eeb328c | ||
|
|
be94bf1f3c | ||
|
|
e24a952e98 | ||
|
|
89f20d12cf | ||
|
|
ebeb0f1154 | ||
|
|
d66fe214d9 | ||
|
|
75781503b8 | ||
|
|
9aff3ca274 | ||
|
|
7e2a08f3a5 | ||
|
|
a0e13561b1 | ||
|
|
7eedfcd274 | ||
|
|
da779ac77c | ||
|
|
adfa3226e3 | ||
|
|
e5e1a272ff | ||
|
|
d8e8a78368 | ||
|
|
7ae3de1fa0 | ||
|
|
72898c7211 | ||
|
|
fc1a14a0e3 | ||
|
|
f063e4e01c | ||
|
|
07372db906 | ||
|
|
48d04e8141 | ||
|
|
22c40a4766 | ||
|
|
bcf140b3c1 | ||
|
|
e3692a6a3d | ||
|
|
e7489383a2 | ||
|
|
70246c3f86 | ||
|
|
0796c84da5 | ||
|
|
718482fb02 | ||
|
|
a3fb66daa4 | ||
|
|
da34b80c26 | ||
|
|
ba5ab21140 | ||
|
|
65f41a1e36 | ||
|
|
0930c9c059 | ||
|
|
1d193a9ab9 | ||
|
|
3adc6dca61 | ||
|
|
36d9f841d6 | ||
|
|
48ad13de00 | ||
|
|
42935cce05 | ||
|
|
e77f1c3b0f | ||
|
|
388838aa99 | ||
|
|
d4d0990072 | ||
|
|
4210d17f14 | ||
|
|
fbd12e78c9 | ||
|
|
83d3421e72 | ||
|
|
8bcbf73aaa | ||
|
|
cc5f15885d | ||
|
|
20fdf55bf6 | ||
|
|
955dcec68b | ||
|
|
2b8564b16f | ||
|
|
57da3e51cd | ||
|
|
dede0e9747 | ||
|
|
35d2fc1158 | ||
|
|
c5267335a3 | ||
|
|
15c7b589c2 | ||
|
|
0ada5e8bf7 | ||
|
|
412ac8d1b9 | ||
|
|
5df501a281 | ||
|
|
3e4c61d020 | ||
|
|
cc39fe51b3 | ||
|
|
89cd24388d | ||
|
|
d5da0a8093 | ||
|
|
88ae1f8871 | ||
|
|
50b3d1deaa | ||
|
|
3b3def5b8a | ||
|
|
4f068a45ff | ||
|
|
23a9504a51 | ||
|
|
d0d4eba477 | ||
|
|
a3fab0b5a9 |
@@ -1,5 +1,5 @@
|
||||
[bumpversion]
|
||||
current_version = 1.7.0b1
|
||||
current_version = 1.7.5
|
||||
parse = (?P<major>[\d]+) # major version number
|
||||
\.(?P<minor>[\d]+) # minor version number
|
||||
\.(?P<patch>[\d]+) # patch version number
|
||||
|
||||
@@ -1,70 +0,0 @@
|
||||
## dbt-core 1.7.0-b1 - August 17, 2023
|
||||
|
||||
### Breaking Changes
|
||||
|
||||
- Removed the FirstRunResultError and AfterFirstRunResultError event types, using the existing RunResultError in their place. ([#7963](https://github.com/dbt-labs/dbt-core/issues/7963))
|
||||
|
||||
### Features
|
||||
|
||||
- Enable re-population of metadata vars post-environment change during programmatic invocation ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
|
||||
- Added support to configure a delimiter for a seed file, defaults to comma ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
|
||||
- Allow specification of `create_metric: true` on measures ([#8125](https://github.com/dbt-labs/dbt-core/issues/8125))
|
||||
|
||||
### Fixes
|
||||
|
||||
- Copy dir during `dbt deps` if symlink fails ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
|
||||
- Fixed double-underline ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
|
||||
- Copy target_schema from config into snapshot node ([#6745](https://github.com/dbt-labs/dbt-core/issues/6745))
|
||||
- Enable converting deprecation warnings to errors ([#8130](https://github.com/dbt-labs/dbt-core/issues/8130))
|
||||
- Add status to Parse Inline Error ([#8173](https://github.com/dbt-labs/dbt-core/issues/8173))
|
||||
- Ensure `warn_error_options` get serialized in `invocation_args_dict` ([#7694](https://github.com/dbt-labs/dbt-core/issues/7694))
|
||||
- Stop detecting materialization macros based on macro name ([#6231](https://github.com/dbt-labs/dbt-core/issues/6231))
|
||||
- Update `dbt deps` download retry logic to handle `EOFError` exceptions ([#6653](https://github.com/dbt-labs/dbt-core/issues/6653))
|
||||
- Improve handling of CTE injection with ephemeral models ([#8213](https://github.com/dbt-labs/dbt-core/issues/8213))
|
||||
- Fix unbound local variable error in `checked_agg_time_dimension_for_measure` ([#8230](https://github.com/dbt-labs/dbt-core/issues/8230))
|
||||
- Ensure runtime errors are raised for graph runnable tasks (compile, show, run, etc) ([#8166](https://github.com/dbt-labs/dbt-core/issues/8166))
|
||||
- Fix retry not working with log-file-max-bytes ([#8297](https://github.com/dbt-labs/dbt-core/issues/8297))
|
||||
- Detect changes to model access, version, or latest_version in state:modified ([#8189](https://github.com/dbt-labs/dbt-core/issues/8189))
|
||||
- Add connection status into list of statuses for dbt debug ([#8350](https://github.com/dbt-labs/dbt-core/issues/8350))
|
||||
- fix fqn-selection for external versioned models ([#8374](https://github.com/dbt-labs/dbt-core/issues/8374))
|
||||
- Fix: DbtInternalError after model that previously ref'd external model is deleted ([#8375](https://github.com/dbt-labs/dbt-core/issues/8375))
|
||||
- Fix using list command with path selector and project-dir ([#8385](https://github.com/dbt-labs/dbt-core/issues/8385))
|
||||
- Remedy performance regression by only writing run_results.json once. ([#8360](https://github.com/dbt-labs/dbt-core/issues/8360))
|
||||
|
||||
### Docs
|
||||
|
||||
- Corrected spelling of "Partiton" ([dbt-docs/#8100](https://github.com/dbt-labs/dbt-docs/issues/8100))
|
||||
- Remove static SQL codeblock for metrics ([dbt-docs/#436](https://github.com/dbt-labs/dbt-docs/issues/436))
|
||||
- fixed comment util.py ([dbt-docs/#None](https://github.com/dbt-labs/dbt-docs/issues/None))
|
||||
- Display contract and column constraints on the model page ([dbt-docs/#433](https://github.com/dbt-labs/dbt-docs/issues/433))
|
||||
- Display semantic model details in docs ([dbt-docs/#431](https://github.com/dbt-labs/dbt-docs/issues/431))
|
||||
|
||||
### Under the Hood
|
||||
|
||||
- Refactor flaky test pp_versioned_models ([#7781](https://github.com/dbt-labs/dbt-core/issues/7781))
|
||||
- format exception from dbtPlugin.initialize ([#8152](https://github.com/dbt-labs/dbt-core/issues/8152))
|
||||
- A way to control maxBytes for a single dbt.log file ([#8199](https://github.com/dbt-labs/dbt-core/issues/8199))
|
||||
- Ref expressions with version can now be processed by the latest version of the high-performance dbt-extractor library. ([#7688](https://github.com/dbt-labs/dbt-core/issues/7688))
|
||||
- Bump manifest schema version to v11, freeze manifest v10 ([#8333](https://github.com/dbt-labs/dbt-core/issues/8333))
|
||||
- add tracking for plugin.get_nodes calls ([#8344](https://github.com/dbt-labs/dbt-core/issues/8344))
|
||||
- add internal flag: --no-partial-parse-file-diff to inform whether to compute a file diff during partial parsing ([#8363](https://github.com/dbt-labs/dbt-core/issues/8363))
|
||||
- Add return values to a number of functions for mypy ([#8389](https://github.com/dbt-labs/dbt-core/issues/8389))
|
||||
- Fix mypy warnings for ManifestLoader.load() ([#8401](https://github.com/dbt-labs/dbt-core/issues/8401))
|
||||
- Use python version 3.10.7 in Docker image. ([#8444](https://github.com/dbt-labs/dbt-core/issues/8444))
|
||||
|
||||
### Dependencies
|
||||
|
||||
- Bump mypy from 1.3.0 to 1.4.0 ([#7912](https://github.com/dbt-labs/dbt-core/pull/7912))
|
||||
- Bump mypy from 1.4.0 to 1.4.1 ([#8219](https://github.com/dbt-labs/dbt-core/pull/8219))
|
||||
- Update pin for click<9 ([#8232](https://github.com/dbt-labs/dbt-core/pull/8232))
|
||||
- Add upper bound to sqlparse pin of <0.5 ([#8236](https://github.com/dbt-labs/dbt-core/pull/8236))
|
||||
- Support dbt-semantic-interfaces 0.2.0 ([#8250](https://github.com/dbt-labs/dbt-core/pull/8250))
|
||||
|
||||
### Contributors
|
||||
- [@anjutiwari](https://github.com/anjutiwari) ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
|
||||
- [@d-kaneshiro](https://github.com/d-kaneshiro) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
|
||||
- [@gem7318](https://github.com/gem7318) ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
|
||||
- [@lllong33](https://github.com/lllong33) ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
|
||||
- [@marcodamore](https://github.com/marcodamore) ([#436](https://github.com/dbt-labs/dbt-core/issues/436))
|
||||
- [@pgoslatara](https://github.com/pgoslatara) ([#8100](https://github.com/dbt-labs/dbt-core/issues/8100))
|
||||
- [@ramonvermeulen](https://github.com/ramonvermeulen) ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
|
||||
157
.changes/1.7.0.md
Normal file
157
.changes/1.7.0.md
Normal file
@@ -0,0 +1,157 @@
|
||||
## dbt-core 1.7.0 - November 02, 2023
|
||||
|
||||
### Breaking Changes
|
||||
|
||||
- Removed the FirstRunResultError and AfterFirstRunResultError event types, using the existing RunResultError in their place. ([#7963](https://github.com/dbt-labs/dbt-core/issues/7963))
|
||||
|
||||
### Features
|
||||
|
||||
- add log file of installed packages via dbt deps ([#6643](https://github.com/dbt-labs/dbt-core/issues/6643))
|
||||
- Enable re-population of metadata vars post-environment change during programmatic invocation ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
|
||||
- Added support to configure a delimiter for a seed file, defaults to comma ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
|
||||
- Allow specification of `create_metric: true` on measures ([#8125](https://github.com/dbt-labs/dbt-core/issues/8125))
|
||||
- Add node attributes related to compilation to run_results.json ([#7519](https://github.com/dbt-labs/dbt-core/issues/7519))
|
||||
- Add --no-inject-ephemeral-ctes flag for `compile` command, for usage by linting. ([#8480](https://github.com/dbt-labs/dbt-core/issues/8480))
|
||||
- Support configuration of semantic models with the addition of enable/disable and group enablement. ([#7968](https://github.com/dbt-labs/dbt-core/issues/7968))
|
||||
- Accept a `dbt-cloud` config in dbt_project.yml ([#8438](https://github.com/dbt-labs/dbt-core/issues/8438))
|
||||
- Support atomic replace in the global replace macro ([#8539](https://github.com/dbt-labs/dbt-core/issues/8539))
|
||||
- Use translate_type on data_type in model.columns in templates by default, remove no op `TYPE_LABELS` ([#8007](https://github.com/dbt-labs/dbt-core/issues/8007))
|
||||
- Add an option to generate static documentation ([#8614](https://github.com/dbt-labs/dbt-core/issues/8614))
|
||||
- Allow setting "access" as a config in addition to as a property ([#8383](https://github.com/dbt-labs/dbt-core/issues/8383))
|
||||
- Loosen typing requirement on renameable/replaceable relations to Iterable to allow adapters more flexibility in registering relation types, include docstrings as suggestions ([#8647](https://github.com/dbt-labs/dbt-core/issues/8647))
|
||||
- Add support for optional label in semantic_models, measures, dimensions and entities. ([#8595](https://github.com/dbt-labs/dbt-core/issues/8595), [#8755](https://github.com/dbt-labs/dbt-core/issues/8755))
|
||||
- Allow adapters to include package logs in dbt standard logging ([#7859](https://github.com/dbt-labs/dbt-core/issues/7859))
|
||||
- Support storing test failures as views ([#6914](https://github.com/dbt-labs/dbt-core/issues/6914))
|
||||
- resolve packages with same git repo and unique subdirectory ([#5374](https://github.com/dbt-labs/dbt-core/issues/5374))
|
||||
- Add new ResourceReport event to record memory/cpu/io metrics ([#8342](https://github.com/dbt-labs/dbt-core/issues/8342))
|
||||
- Adding `date_spine` macro (and supporting macros) from dbt-utils to dbt-core ([#8172](https://github.com/dbt-labs/dbt-core/issues/8172))
|
||||
- Support `fill_nulls_with` and `join_to_timespine` for metric nodes ([#8593](https://github.com/dbt-labs/dbt-core/issues/8593), [#8755](https://github.com/dbt-labs/dbt-core/issues/8755))
|
||||
- Raise a warning when a contracted model has a numeric field without scale defined ([#8183](https://github.com/dbt-labs/dbt-core/issues/8183))
|
||||
- Added support for retrieving partial catalog information from a schema ([#8521](https://github.com/dbt-labs/dbt-core/issues/8521))
|
||||
- Add meta attribute to SemanticModels config ([#8511](https://github.com/dbt-labs/dbt-core/issues/8511))
|
||||
- Selectors with docs generate limits catalog generation ([#6014](https://github.com/dbt-labs/dbt-core/issues/6014))
|
||||
- Allow freshness to be determined via DBMS metadata for supported adapters ([#8704](https://github.com/dbt-labs/dbt-core/issues/8704))
|
||||
- Add support for semantic layer SavedQuery node type ([#8594](https://github.com/dbt-labs/dbt-core/issues/8594))
|
||||
- Add exports to SavedQuery spec ([#8892](https://github.com/dbt-labs/dbt-core/issues/8892))
|
||||
|
||||
### Fixes
|
||||
|
||||
- Copy dir during `dbt deps` if symlink fails ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
|
||||
- If --profile specified with dbt-init, create the project with the specified profile ([#6154](https://github.com/dbt-labs/dbt-core/issues/6154))
|
||||
- Fixed double-underline ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
|
||||
- Copy target_schema from config into snapshot node ([#6745](https://github.com/dbt-labs/dbt-core/issues/6745))
|
||||
- Enable converting deprecation warnings to errors ([#8130](https://github.com/dbt-labs/dbt-core/issues/8130))
|
||||
- Add status to Parse Inline Error ([#8173](https://github.com/dbt-labs/dbt-core/issues/8173))
|
||||
- Ensure `warn_error_options` get serialized in `invocation_args_dict` ([#7694](https://github.com/dbt-labs/dbt-core/issues/7694))
|
||||
- Stop detecting materialization macros based on macro name ([#6231](https://github.com/dbt-labs/dbt-core/issues/6231))
|
||||
- Update `dbt deps` download retry logic to handle `EOFError` exceptions ([#6653](https://github.com/dbt-labs/dbt-core/issues/6653))
|
||||
- Improve handling of CTE injection with ephemeral models ([#8213](https://github.com/dbt-labs/dbt-core/issues/8213))
|
||||
- Fix unbound local variable error in `checked_agg_time_dimension_for_measure` ([#8230](https://github.com/dbt-labs/dbt-core/issues/8230))
|
||||
- Ensure runtime errors are raised for graph runnable tasks (compile, show, run, etc) ([#8166](https://github.com/dbt-labs/dbt-core/issues/8166))
|
||||
- Fix retry not working with log-file-max-bytes ([#8297](https://github.com/dbt-labs/dbt-core/issues/8297))
|
||||
- Add explicit support for integers for the show command ([#8153](https://github.com/dbt-labs/dbt-core/issues/8153))
|
||||
- Detect changes to model access, version, or latest_version in state:modified ([#8189](https://github.com/dbt-labs/dbt-core/issues/8189))
|
||||
- Add connection status into list of statuses for dbt debug ([#8350](https://github.com/dbt-labs/dbt-core/issues/8350))
|
||||
- fix fqn-selection for external versioned models ([#8374](https://github.com/dbt-labs/dbt-core/issues/8374))
|
||||
- Fix: DbtInternalError after model that previously ref'd external model is deleted ([#8375](https://github.com/dbt-labs/dbt-core/issues/8375))
|
||||
- Fix using list command with path selector and project-dir ([#8385](https://github.com/dbt-labs/dbt-core/issues/8385))
|
||||
- Remedy performance regression by only writing run_results.json once. ([#8360](https://github.com/dbt-labs/dbt-core/issues/8360))
|
||||
- Add support for swapping materialized views with tables/views and vice versa ([#8449](https://github.com/dbt-labs/dbt-core/issues/8449))
|
||||
- Turn breaking changes to contracted models into warnings for unversioned models ([#8384](https://github.com/dbt-labs/dbt-core/issues/8384), [#8282](https://github.com/dbt-labs/dbt-core/issues/8282))
|
||||
- Ensure parsing does not break when `window_groupings` is not specified for `non_additive_dimension` ([#8453](https://github.com/dbt-labs/dbt-core/issues/8453))
|
||||
- fix ambiguous reference error for tests and versions when model name is duplicated across packages ([#8327](https://github.com/dbt-labs/dbt-core/issues/8327), [#8493](https://github.com/dbt-labs/dbt-core/issues/8493))
|
||||
- Fix "Internal Error: Expected node <unique-id> not found in manifest" when depends_on set on ModelNodeArgs ([#8506](https://github.com/dbt-labs/dbt-core/issues/8506))
|
||||
- Fix snapshot success message ([#7583](https://github.com/dbt-labs/dbt-core/issues/7583))
|
||||
- Parse the correct schema version from manifest ([#8544](https://github.com/dbt-labs/dbt-core/issues/8544))
|
||||
- make version comparison insensitive to order ([#8571](https://github.com/dbt-labs/dbt-core/issues/8571))
|
||||
- Update metric helper functions to work with new semantic layer metrics ([#8134](https://github.com/dbt-labs/dbt-core/issues/8134))
|
||||
- Disallow cleaning paths outside current working directory ([#8318](https://github.com/dbt-labs/dbt-core/issues/8318))
|
||||
- Warn when --state == --target ([#8160](https://github.com/dbt-labs/dbt-core/issues/8160))
|
||||
- update dbt show to include limit in DWH query ([#8496](https://github.com/dbt-labs/dbt-core/issues/8496), [#8417](https://github.com/dbt-labs/dbt-core/issues/8417))
|
||||
- Support quoted parameter list for MultiOption CLI options. ([#8598](https://github.com/dbt-labs/dbt-core/issues/8598))
|
||||
- Support global flags passed in after subcommands ([#6497](https://github.com/dbt-labs/dbt-core/issues/6497))
|
||||
- Lower bound of `8.0.2` for `click` ([#8683](https://github.com/dbt-labs/dbt-core/issues/8683))
|
||||
- Fixes test type edges filter ([#8692](https://github.com/dbt-labs/dbt-core/issues/8692))
|
||||
- semantic models in graph selection ([#8589](https://github.com/dbt-labs/dbt-core/issues/8589))
|
||||
- Support doc blocks in nested semantic model YAML ([#8509](https://github.com/dbt-labs/dbt-core/issues/8509))
|
||||
- avoid double-rendering sql_header in dbt show ([#8739](https://github.com/dbt-labs/dbt-core/issues/8739))
|
||||
- Fix tag selection for projects with semantic models ([#8749](https://github.com/dbt-labs/dbt-core/issues/8749))
|
||||
- Foreign key constraint on incremental model results in Database Error ([#8022](https://github.com/dbt-labs/dbt-core/issues/8022))
|
||||
- Support docs blocks on versioned model column descriptions ([#8540](https://github.com/dbt-labs/dbt-core/issues/8540))
|
||||
- Enable seeds to be handled from stored manifest data ([#6875](https://github.com/dbt-labs/dbt-core/issues/6875))
|
||||
- Override path-like args in dbt retry ([#8682](https://github.com/dbt-labs/dbt-core/issues/8682))
|
||||
- Group updates on unmodified nodes are handled gracefully for state:modified ([#8371](https://github.com/dbt-labs/dbt-core/issues/8371))
|
||||
- Partial parsing fix for adding groups and updating models at the same time ([#8697](https://github.com/dbt-labs/dbt-core/issues/8697))
|
||||
- Fix partial parsing not working for semantic model change ([#8859](https://github.com/dbt-labs/dbt-core/issues/8859))
|
||||
- Rework get_catalog implementation to retain previous adapter interface semantics ([#8846](https://github.com/dbt-labs/dbt-core/issues/8846))
|
||||
- Add back contract enforcement for temporary tables on postgres ([#8857](https://github.com/dbt-labs/dbt-core/issues/8857))
|
||||
- Add version to fqn when version==0 ([#8836](https://github.com/dbt-labs/dbt-core/issues/8836))
|
||||
- Fix cased comparison in catalog-retrieval function. ([#8939](https://github.com/dbt-labs/dbt-core/issues/8939))
|
||||
- Catalog queries now assign the correct type to materialized views ([#8864](https://github.com/dbt-labs/dbt-core/issues/8864))
|
||||
- Make relation filtering None-tolerant for maximal flexibility across adapters. ([#8974](https://github.com/dbt-labs/dbt-core/issues/8974))
|
||||
|
||||
### Docs
|
||||
|
||||
- Corrected spelling of "Partiton" ([dbt-docs/#8100](https://github.com/dbt-labs/dbt-docs/issues/8100))
|
||||
- Remove static SQL codeblock for metrics ([dbt-docs/#436](https://github.com/dbt-labs/dbt-docs/issues/436))
|
||||
- fixed comment util.py ([dbt-docs/#None](https://github.com/dbt-labs/dbt-docs/issues/None))
|
||||
- Fix newline escapes and improve formatting in docker README ([dbt-docs/#8211](https://github.com/dbt-labs/dbt-docs/issues/8211))
|
||||
- Display contract and column constraints on the model page ([dbt-docs/#433](https://github.com/dbt-labs/dbt-docs/issues/433))
|
||||
- Display semantic model details in docs ([dbt-docs/#431](https://github.com/dbt-labs/dbt-docs/issues/431))
|
||||
|
||||
### Under the Hood
|
||||
|
||||
- Switch from hologram to mashumaro jsonschema ([#8426](https://github.com/dbt-labs/dbt-core/issues/8426))
|
||||
- Refactor flaky test pp_versioned_models ([#7781](https://github.com/dbt-labs/dbt-core/issues/7781))
|
||||
- format exception from dbtPlugin.initialize ([#8152](https://github.com/dbt-labs/dbt-core/issues/8152))
|
||||
- A way to control maxBytes for a single dbt.log file ([#8199](https://github.com/dbt-labs/dbt-core/issues/8199))
|
||||
- Ref expressions with version can now be processed by the latest version of the high-performance dbt-extractor library. ([#7688](https://github.com/dbt-labs/dbt-core/issues/7688))
|
||||
- Bump manifest schema version to v11, freeze manifest v10 ([#8333](https://github.com/dbt-labs/dbt-core/issues/8333))
|
||||
- add tracking for plugin.get_nodes calls ([#8344](https://github.com/dbt-labs/dbt-core/issues/8344))
|
||||
- add internal flag: --no-partial-parse-file-diff to inform whether to compute a file diff during partial parsing ([#8363](https://github.com/dbt-labs/dbt-core/issues/8363))
|
||||
- Add return values to a number of functions for mypy ([#8389](https://github.com/dbt-labs/dbt-core/issues/8389))
|
||||
- Fix mypy warnings for ManifestLoader.load() ([#8401](https://github.com/dbt-labs/dbt-core/issues/8401))
|
||||
- Use python version 3.10.7 in Docker image. ([#8444](https://github.com/dbt-labs/dbt-core/issues/8444))
|
||||
- Re-organize jinja macros: relation-specific in /macros/adapters/relations/<relation>, relation agnostic in /macros/relations ([#8449](https://github.com/dbt-labs/dbt-core/issues/8449))
|
||||
- Update typing to meet mypy standards ([#8396](https://github.com/dbt-labs/dbt-core/issues/8396))
|
||||
- Mypy errors - adapters/factory.py ([#8387](https://github.com/dbt-labs/dbt-core/issues/8387))
|
||||
- Added more type annotations. ([#8537](https://github.com/dbt-labs/dbt-core/issues/8537))
|
||||
- Audit potential circular dependencies ([#8349](https://github.com/dbt-labs/dbt-core/issues/8349))
|
||||
- Add functional test for advanced ref override ([#8566](https://github.com/dbt-labs/dbt-core/issues/8566))
|
||||
- Add typing to __init__ in base.py ([#8398](https://github.com/dbt-labs/dbt-core/issues/8398))
|
||||
- Fix untyped functions in task/runnable.py (mypy warning) ([#8402](https://github.com/dbt-labs/dbt-core/issues/8402))
|
||||
- add a test for ephemeral cte injection ([#8225](https://github.com/dbt-labs/dbt-core/issues/8225))
|
||||
- Fix test_numeric_values to look for more specific strings ([#8470](https://github.com/dbt-labs/dbt-core/issues/8470))
|
||||
- Pin types-requests<2.31.0 in `dev-requirements.txt` ([#8789](https://github.com/dbt-labs/dbt-core/issues/8789))
|
||||
- Add warning_tag to UnversionedBreakingChange ([#8827](https://github.com/dbt-labs/dbt-core/issues/8827))
|
||||
- Update v10 manifest schema to match 1.6 for testing schema compatibility ([#8835](https://github.com/dbt-labs/dbt-core/issues/8835))
|
||||
- Add a no-op runner for Saved Query ([#8893](https://github.com/dbt-labs/dbt-core/issues/8893))
|
||||
|
||||
### Dependencies
|
||||
|
||||
- Bump mypy from 1.3.0 to 1.4.0 ([#7912](https://github.com/dbt-labs/dbt-core/pull/7912))
|
||||
- Bump mypy from 1.4.0 to 1.4.1 ([#8219](https://github.com/dbt-labs/dbt-core/pull/8219))
|
||||
- Update pin for click<9 ([#8232](https://github.com/dbt-labs/dbt-core/pull/8232))
|
||||
- Add upper bound to sqlparse pin of <0.5 ([#8236](https://github.com/dbt-labs/dbt-core/pull/8236))
|
||||
- Support dbt-semantic-interfaces 0.2.0 ([#8250](https://github.com/dbt-labs/dbt-core/pull/8250))
|
||||
- Bump docker/build-push-action from 4 to 5 ([#8783](https://github.com/dbt-labs/dbt-core/pull/8783))
|
||||
- Upgrade dbt-semantic-interfaces dep to 0.3.0 ([#8819](https://github.com/dbt-labs/dbt-core/pull/8819))
|
||||
- Begin using DSI 0.4.x ([#8892](https://github.com/dbt-labs/dbt-core/pull/8892))
|
||||
|
||||
### Contributors
|
||||
- [@anjutiwari](https://github.com/anjutiwari) ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
|
||||
- [@benmosher](https://github.com/benmosher) ([#8480](https://github.com/dbt-labs/dbt-core/issues/8480))
|
||||
- [@d-kaneshiro](https://github.com/d-kaneshiro) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
|
||||
- [@dave-connors-3](https://github.com/dave-connors-3) ([#8153](https://github.com/dbt-labs/dbt-core/issues/8153), [#8589](https://github.com/dbt-labs/dbt-core/issues/8589))
|
||||
- [@dylan-murray](https://github.com/dylan-murray) ([#8683](https://github.com/dbt-labs/dbt-core/issues/8683))
|
||||
- [@ezraerb](https://github.com/ezraerb) ([#6154](https://github.com/dbt-labs/dbt-core/issues/6154))
|
||||
- [@gem7318](https://github.com/gem7318) ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
|
||||
- [@jamezrin](https://github.com/jamezrin) ([#8211](https://github.com/dbt-labs/dbt-core/issues/8211))
|
||||
- [@jusbaldw](https://github.com/jusbaldw) ([#6643](https://github.com/dbt-labs/dbt-core/issues/6643))
|
||||
- [@lllong33](https://github.com/lllong33) ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
|
||||
- [@marcodamore](https://github.com/marcodamore) ([#436](https://github.com/dbt-labs/dbt-core/issues/436))
|
||||
- [@mescanne](https://github.com/mescanne) ([#8614](https://github.com/dbt-labs/dbt-core/issues/8614))
|
||||
- [@pgoslatara](https://github.com/pgoslatara) ([#8100](https://github.com/dbt-labs/dbt-core/issues/8100))
|
||||
- [@philippeboyd](https://github.com/philippeboyd) ([#5374](https://github.com/dbt-labs/dbt-core/issues/5374))
|
||||
- [@ramonvermeulen](https://github.com/ramonvermeulen) ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
|
||||
- [@renanleme](https://github.com/renanleme) ([#8692](https://github.com/dbt-labs/dbt-core/issues/8692))
|
||||
@@ -1,7 +0,0 @@
|
||||
kind: Breaking Changes
|
||||
body: Removed the FirstRunResultError and AfterFirstRunResultError event types, using
|
||||
the existing RunResultError in their place.
|
||||
time: 2023-07-25T17:13:59.441682-04:00
|
||||
custom:
|
||||
Author: peterallenwebb
|
||||
Issue: "7963"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: "Dependencies"
|
||||
body: "Bump mypy from 1.3.0 to 1.4.0"
|
||||
time: 2023-06-21T00:57:52.00000Z
|
||||
custom:
|
||||
Author: dependabot[bot]
|
||||
PR: 7912
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: "Dependencies"
|
||||
body: "Bump mypy from 1.4.0 to 1.4.1"
|
||||
time: 2023-07-26T20:17:40.00000Z
|
||||
custom:
|
||||
Author: dependabot[bot]
|
||||
PR: 8219
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Dependencies
|
||||
body: Update pin for click<9
|
||||
time: 2023-07-27T14:57:03.180458-05:00
|
||||
custom:
|
||||
Author: emmyoop
|
||||
PR: "8232"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Dependencies
|
||||
body: Add upper bound to sqlparse pin of <0.5
|
||||
time: 2023-07-27T14:57:26.40416-05:00
|
||||
custom:
|
||||
Author: emmyoop
|
||||
PR: "8236"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Dependencies
|
||||
body: Support dbt-semantic-interfaces 0.2.0
|
||||
time: 2023-07-28T13:52:27.207241-07:00
|
||||
custom:
|
||||
Author: QMalcolm
|
||||
PR: "8250"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Docs
|
||||
body: Corrected spelling of "Partiton"
|
||||
time: 2023-07-15T20:09:07.057361092+02:00
|
||||
custom:
|
||||
Author: pgoslatara
|
||||
Issue: "8100"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Docs
|
||||
body: Remove static SQL codeblock for metrics
|
||||
time: 2023-07-18T19:24:22.155323+02:00
|
||||
custom:
|
||||
Author: marcodamore
|
||||
Issue: "436"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Docs
|
||||
body: fixed comment util.py
|
||||
time: 2023-07-27T17:09:00.089237+09:00
|
||||
custom:
|
||||
Author: d-kaneshiro
|
||||
Issue: None
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Docs
|
||||
body: Display contract and column constraints on the model page
|
||||
time: 2023-08-04T13:18:15.627005-05:00
|
||||
custom:
|
||||
Author: emmyoop
|
||||
Issue: "433"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Docs
|
||||
body: Display semantic model details in docs
|
||||
time: 2023-08-07T15:25:48.711627-05:00
|
||||
custom:
|
||||
Author: emmyoop
|
||||
Issue: "431"
|
||||
@@ -1,7 +0,0 @@
|
||||
kind: Features
|
||||
body: Enable re-population of metadata vars post-environment change during programmatic
|
||||
invocation
|
||||
time: 2023-07-02T12:28:13.416305-04:00
|
||||
custom:
|
||||
Author: gem7318
|
||||
Issue: "8010"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Features
|
||||
body: Added support to configure a delimiter for a seed file, defaults to comma
|
||||
time: 2023-07-14T20:24:45.513847165+02:00
|
||||
custom:
|
||||
Author: ramonvermeulen
|
||||
Issue: "3990"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Features
|
||||
body: 'Allow specification of `create_metric: true` on measures'
|
||||
time: 2023-08-03T15:18:24.351003-07:00
|
||||
custom:
|
||||
Author: QMalcolm
|
||||
Issue: "8125"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Fixes
|
||||
body: Copy dir during `dbt deps` if symlink fails
|
||||
time: 2023-04-24T21:07:34.336797+05:30
|
||||
custom:
|
||||
Author: anjutiwari
|
||||
Issue: "7428 8223"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Fixes
|
||||
body: Fixed double-underline
|
||||
time: 2023-06-25T14:27:31.231253719+08:00
|
||||
custom:
|
||||
Author: lllong33
|
||||
Issue: "5301"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Fixes
|
||||
body: Copy target_schema from config into snapshot node
|
||||
time: 2023-07-17T16:06:52.957724-04:00
|
||||
custom:
|
||||
Author: gshank
|
||||
Issue: "6745"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Fixes
|
||||
body: Enable converting deprecation warnings to errors
|
||||
time: 2023-07-18T12:55:18.03914-04:00
|
||||
custom:
|
||||
Author: michelleark
|
||||
Issue: "8130"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Fixes
|
||||
body: Add status to Parse Inline Error
|
||||
time: 2023-07-20T12:27:23.085084-07:00
|
||||
custom:
|
||||
Author: ChenyuLInx
|
||||
Issue: "8173"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Fixes
|
||||
body: Ensure `warn_error_options` get serialized in `invocation_args_dict`
|
||||
time: 2023-07-20T16:15:13.761813-07:00
|
||||
custom:
|
||||
Author: QMalcolm
|
||||
Issue: "7694"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Fixes
|
||||
body: Stop detecting materialization macros based on macro name
|
||||
time: 2023-07-20T17:01:12.496238-07:00
|
||||
custom:
|
||||
Author: QMalcolm
|
||||
Issue: "6231"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Fixes
|
||||
body: Update `dbt deps` download retry logic to handle `EOFError` exceptions
|
||||
time: 2023-07-20T17:24:22.969951-07:00
|
||||
custom:
|
||||
Author: QMalcolm
|
||||
Issue: "6653"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Fixes
|
||||
body: Improve handling of CTE injection with ephemeral models
|
||||
time: 2023-07-26T10:44:48.888451-04:00
|
||||
custom:
|
||||
Author: gshank
|
||||
Issue: "8213"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Fixes
|
||||
body: Fix unbound local variable error in `checked_agg_time_dimension_for_measure`
|
||||
time: 2023-07-27T12:58:30.673803-07:00
|
||||
custom:
|
||||
Author: QMalcolm
|
||||
Issue: "8230"
|
||||
@@ -1,7 +0,0 @@
|
||||
kind: Fixes
|
||||
body: Ensure runtime errors are raised for graph runnable tasks (compile, show, run,
|
||||
etc)
|
||||
time: 2023-07-28T11:56:20.863718-04:00
|
||||
custom:
|
||||
Author: michelleark
|
||||
Issue: "8166"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Fixes
|
||||
body: Fix retry not working with log-file-max-bytes
|
||||
time: 2023-08-02T14:15:56.306027-07:00
|
||||
custom:
|
||||
Author: ChenyuLInx
|
||||
Issue: "8297"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Fixes
|
||||
body: Detect changes to model access, version, or latest_version in state:modified
|
||||
time: 2023-08-06T22:23:19.166334-04:00
|
||||
custom:
|
||||
Author: michelleark
|
||||
Issue: "8189"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Fixes
|
||||
body: Add connection status into list of statuses for dbt debug
|
||||
time: 2023-08-10T18:48:59.221344+01:00
|
||||
custom:
|
||||
Author: aranke
|
||||
Issue: "8350"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Fixes
|
||||
body: fix fqn-selection for external versioned models
|
||||
time: 2023-08-11T20:41:44.725144-04:00
|
||||
custom:
|
||||
Author: michelleark
|
||||
Issue: "8374"
|
||||
@@ -1,7 +0,0 @@
|
||||
kind: Fixes
|
||||
body: 'Fix: DbtInternalError after model that previously ref''d external model is
|
||||
deleted'
|
||||
time: 2023-08-11T21:20:08.145554-04:00
|
||||
custom:
|
||||
Author: michelleark
|
||||
Issue: "8375"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Fixes
|
||||
body: Fix using list command with path selector and project-dir
|
||||
time: 2023-08-14T14:57:02.02816-04:00
|
||||
custom:
|
||||
Author: gshank
|
||||
Issue: "8385"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Fixes
|
||||
body: Remedy performance regression by only writing run_results.json once.
|
||||
time: 2023-08-15T10:44:44.836991-04:00
|
||||
custom:
|
||||
Author: peterallenwebb
|
||||
Issue: "8360"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Under the Hood
|
||||
body: Use python version 3.10.7 in Docker image.
|
||||
time: 2023-08-17T13:09:15.936349-05:00
|
||||
custom:
|
||||
Author: McKnight-42
|
||||
Issue: "8444"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Under the Hood
|
||||
body: Refactor flaky test pp_versioned_models
|
||||
time: 2023-07-19T12:46:11.972481-04:00
|
||||
custom:
|
||||
Author: gshank
|
||||
Issue: "7781"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Under the Hood
|
||||
body: format exception from dbtPlugin.initialize
|
||||
time: 2023-07-19T16:33:34.586377-04:00
|
||||
custom:
|
||||
Author: michelleark
|
||||
Issue: "8152"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Under the Hood
|
||||
body: A way to control maxBytes for a single dbt.log file
|
||||
time: 2023-07-24T15:06:54.263822-07:00
|
||||
custom:
|
||||
Author: ChenyuLInx
|
||||
Issue: "8199"
|
||||
@@ -1,7 +0,0 @@
|
||||
kind: Under the Hood
|
||||
body: Ref expressions with version can now be processed by the latest version of the
|
||||
high-performance dbt-extractor library.
|
||||
time: 2023-07-25T10:26:09.902878-04:00
|
||||
custom:
|
||||
Author: peterallenwebb
|
||||
Issue: "7688"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Under the Hood
|
||||
body: Bump manifest schema version to v11, freeze manifest v10
|
||||
time: 2023-08-07T16:45:09.712744-04:00
|
||||
custom:
|
||||
Author: gshank
|
||||
Issue: "8333"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Under the Hood
|
||||
body: add tracking for plugin.get_nodes calls
|
||||
time: 2023-08-09T09:48:34.819445-04:00
|
||||
custom:
|
||||
Author: michelleark
|
||||
Issue: "8344"
|
||||
@@ -1,7 +0,0 @@
|
||||
kind: Under the Hood
|
||||
body: 'add internal flag: --no-partial-parse-file-diff to inform whether to compute
|
||||
a file diff during partial parsing'
|
||||
time: 2023-08-11T10:09:02.832241-04:00
|
||||
custom:
|
||||
Author: michelleark
|
||||
Issue: "8363"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Under the Hood
|
||||
body: Add return values to a number of functions for mypy
|
||||
time: 2023-08-15T17:03:07.895252-04:00
|
||||
custom:
|
||||
Author: gshank
|
||||
Issue: "8389"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Under the Hood
|
||||
body: Fix mypy warnings for ManifestLoader.load()
|
||||
time: 2023-08-17T13:45:48.937252-04:00
|
||||
custom:
|
||||
Author: gshank
|
||||
Issue: "8401"
|
||||
8
.changes/1.7.1.md
Normal file
8
.changes/1.7.1.md
Normal file
@@ -0,0 +1,8 @@
|
||||
## dbt-core 1.7.1 - November 07, 2023
|
||||
|
||||
### Fixes
|
||||
|
||||
- Fix compilation exception running empty seed file and support new Integer agate data_type ([#8895](https://github.com/dbt-labs/dbt-core/issues/8895))
|
||||
- Update run_results.json from previous versions of dbt to support deferral and rerun from failure ([#9010](https://github.com/dbt-labs/dbt-core/issues/9010))
|
||||
- Use MANIFEST.in to recursively include all jinja templates; fixes issue where some templates were not included in the distribution ([#9016](https://github.com/dbt-labs/dbt-core/issues/9016))
|
||||
- Fix git repository with subdirectory for Deps ([#9000](https://github.com/dbt-labs/dbt-core/issues/9000))
|
||||
16
.changes/1.7.2.md
Normal file
16
.changes/1.7.2.md
Normal file
@@ -0,0 +1,16 @@
|
||||
## dbt-core 1.7.2 - November 16, 2023
|
||||
|
||||
### Features
|
||||
|
||||
- Support setting export configs hierarchically via saved query and project configs ([#8956](https://github.com/dbt-labs/dbt-core/issues/8956))
|
||||
|
||||
### Fixes
|
||||
|
||||
- Fix formatting of tarball information in packages-lock.yml ([#9062](https://github.com/dbt-labs/dbt-core/issues/9062))
|
||||
|
||||
### Under the Hood
|
||||
|
||||
- Treat SystemExit as an interrupt if raised during node execution. ([#n/a](https://github.com/dbt-labs/dbt-core/issues/n/a))
|
||||
|
||||
### Contributors
|
||||
- [@benmosher](https://github.com/benmosher) ([#n/a](https://github.com/dbt-labs/dbt-core/issues/n/a))
|
||||
7
.changes/1.7.3.md
Normal file
7
.changes/1.7.3.md
Normal file
@@ -0,0 +1,7 @@
|
||||
## dbt-core 1.7.3 - November 29, 2023
|
||||
|
||||
### Fixes
|
||||
|
||||
- deps: Lock git packages to commit SHA during resolution ([#9050](https://github.com/dbt-labs/dbt-core/issues/9050))
|
||||
- deps: Use PackageRenderer to read package-lock.json ([#9127](https://github.com/dbt-labs/dbt-core/issues/9127))
|
||||
- Get sources working again in dbt docs generate ([#9119](https://github.com/dbt-labs/dbt-core/issues/9119))
|
||||
12
.changes/1.7.4.md
Normal file
12
.changes/1.7.4.md
Normal file
@@ -0,0 +1,12 @@
|
||||
## dbt-core 1.7.4 - December 14, 2023
|
||||
|
||||
### Features
|
||||
|
||||
- Adds support for parsing conversion metric related properties for the semantic layer. ([#9203](https://github.com/dbt-labs/dbt-core/issues/9203))
|
||||
|
||||
### Fixes
|
||||
|
||||
- Ensure we produce valid jsonschema schemas for manifest, catalog, run-results, and sources ([#8991](https://github.com/dbt-labs/dbt-core/issues/8991))
|
||||
|
||||
### Contributors
|
||||
- [@WilliamDee](https://github.com/WilliamDee) ([#9203](https://github.com/dbt-labs/dbt-core/issues/9203))
|
||||
8
.changes/1.7.5.md
Normal file
8
.changes/1.7.5.md
Normal file
@@ -0,0 +1,8 @@
|
||||
## dbt-core 1.7.5 - January 18, 2024
|
||||
|
||||
### Fixes
|
||||
|
||||
- Preserve the value of vars and the --full-refresh flags when using retry. ([#9112](https://github.com/dbt-labs/dbt-core/issues/9112))
|
||||
|
||||
### Contributors
|
||||
- [@peterallenwebb,](https://github.com/peterallenwebb,) ([#9112](https://github.com/dbt-labs/dbt-core/issues/9112))
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Docs
|
||||
body: Fix newline escapes and improve formatting in docker README
|
||||
time: 2023-07-28T19:34:38.351042747+02:00
|
||||
custom:
|
||||
Author: jamezrin
|
||||
Issue: "8211"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Fixes
|
||||
body: Ensure parsing does not break when `window_groupings` is not specified for `non_additive_dimension`
|
||||
time: 2023-08-18T09:53:48.154848-07:00
|
||||
custom:
|
||||
Author: QMalcolm
|
||||
Issue: "8453"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Fixes
|
||||
body: Turn breaking changes to contracted models into warnings for unversioned models
|
||||
time: 2023-08-18T10:38:02.251286-05:00
|
||||
custom:
|
||||
Author: emmyoop
|
||||
Issue: 8384 8282
|
||||
@@ -1,7 +0,0 @@
|
||||
kind: Fixes
|
||||
body: fix ambiguous reference error for tests and versions when model name is duplicated across
|
||||
packages
|
||||
time: 2023-08-24T16:10:24.437362-04:00
|
||||
custom:
|
||||
Author: michelleark
|
||||
Issue: "8327 8493"
|
||||
@@ -1,7 +0,0 @@
|
||||
kind: Under the Hood
|
||||
body: 'Re-organize jinja macros: relation-specific in /macros/adapters/relations/<relation>,
|
||||
relation agnostic in /macros/relations'
|
||||
time: 2023-08-21T13:48:01.474731-04:00
|
||||
custom:
|
||||
Author: mikealfare
|
||||
Issue: "8449"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Under the Hood
|
||||
body: Update typing to meet mypy standards
|
||||
time: 2023-08-23T19:42:37.130694-04:00
|
||||
custom:
|
||||
Author: mikealfare
|
||||
Issue: "8396"
|
||||
19
.github/CODEOWNERS
vendored
19
.github/CODEOWNERS
vendored
@@ -13,23 +13,6 @@
|
||||
# the core team as a whole will be assigned
|
||||
* @dbt-labs/core-team
|
||||
|
||||
### OSS Tooling Guild
|
||||
|
||||
/.github/ @dbt-labs/guild-oss-tooling
|
||||
.bumpversion.cfg @dbt-labs/guild-oss-tooling
|
||||
|
||||
.changie.yaml @dbt-labs/guild-oss-tooling
|
||||
|
||||
pre-commit-config.yaml @dbt-labs/guild-oss-tooling
|
||||
pytest.ini @dbt-labs/guild-oss-tooling
|
||||
tox.ini @dbt-labs/guild-oss-tooling
|
||||
|
||||
pyproject.toml @dbt-labs/guild-oss-tooling
|
||||
requirements.txt @dbt-labs/guild-oss-tooling
|
||||
dev_requirements.txt @dbt-labs/guild-oss-tooling
|
||||
/core/setup.py @dbt-labs/guild-oss-tooling
|
||||
/core/MANIFEST.in @dbt-labs/guild-oss-tooling
|
||||
|
||||
### ADAPTERS
|
||||
|
||||
# Adapter interface ("base" + "sql" adapter defaults, cache)
|
||||
@@ -40,7 +23,7 @@ dev_requirements.txt @dbt-labs/guild-oss-tooling
|
||||
|
||||
# Postgres plugin
|
||||
/plugins/ @dbt-labs/core-adapters
|
||||
/plugins/postgres/setup.py @dbt-labs/core-adapters @dbt-labs/guild-oss-tooling
|
||||
/plugins/postgres/setup.py @dbt-labs/core-adapters
|
||||
|
||||
# Functional tests for adapter plugins
|
||||
/tests/adapter @dbt-labs/core-adapters
|
||||
|
||||
7
.github/dependabot.yml
vendored
7
.github/dependabot.yml
vendored
@@ -28,3 +28,10 @@ updates:
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
rebase-strategy: "disabled"
|
||||
|
||||
# github dependencies
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
rebase-strategy: "disabled"
|
||||
|
||||
10
.github/pull_request_template.md
vendored
10
.github/pull_request_template.md
vendored
@@ -1,15 +1,12 @@
|
||||
resolves #
|
||||
[docs](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose) dbt-labs/docs.getdbt.com/#
|
||||
resolves #
|
||||
|
||||
<!---
|
||||
Include the number of the issue addressed by this PR above if applicable.
|
||||
PRs for code changes without an associated issue *will not be merged*.
|
||||
See CONTRIBUTING.md for more information.
|
||||
|
||||
Include the number of the docs issue that was opened for this PR. If
|
||||
this change has no user-facing implications, "N/A" suffices instead. New
|
||||
docs tickets can be created by clicking the link above or by going to
|
||||
https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose.
|
||||
Add the `user docs` label to this PR if it will need docs changes. An
|
||||
issue will get opened in docs.getdbt.com upon successful merge of this PR.
|
||||
-->
|
||||
|
||||
### Problem
|
||||
@@ -33,3 +30,4 @@ resolves #
|
||||
- [ ] I have run this code in development and it appears to resolve the stated issue
|
||||
- [ ] This PR includes tests, or tests are not required/relevant for this PR
|
||||
- [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc) or this PR has already received feedback and approval from Product or DX
|
||||
- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions
|
||||
|
||||
10
.github/workflows/docs-issue.yml
vendored
10
.github/workflows/docs-issue.yml
vendored
@@ -27,11 +27,17 @@ permissions:
|
||||
|
||||
jobs:
|
||||
open_issues:
|
||||
if: contains( github.event.pull_request.labels.*.name, 'user docs') && github.event.pull_request.merged == true
|
||||
# we only want to run this when the PR has been merged or the label in the labeled event is `user docs`. Otherwise it runs the
|
||||
# risk of duplicaton of issues being created due to merge and label both triggering this workflow to run and neither having
|
||||
# generating the comment before the other runs. This lives here instead of the shared workflow because this is where we
|
||||
# decide if it should run or not.
|
||||
if: |
|
||||
(github.event.pull_request.merged == true) &&
|
||||
((github.event.action == 'closed' && contains( github.event.pull_request.labels.*.name, 'user docs')) ||
|
||||
(github.event.action == 'labeled' && github.event.label.name == 'user docs'))
|
||||
uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
|
||||
with:
|
||||
issue_repository: "dbt-labs/docs.getdbt.com"
|
||||
issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} PR #${{ github.event.pull_request.number }}"
|
||||
issue_labels: "content,improvement,dbt Core"
|
||||
issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated."
|
||||
secrets: inherit
|
||||
|
||||
6
.github/workflows/release-docker.yml
vendored
6
.github/workflows/release-docker.yml
vendored
@@ -83,7 +83,7 @@ jobs:
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build and push MAJOR.MINOR.PATCH tag
|
||||
uses: docker/build-push-action@v4
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
file: docker/Dockerfile
|
||||
push: True
|
||||
@@ -94,7 +94,7 @@ jobs:
|
||||
ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ github.event.inputs.version_number }}
|
||||
|
||||
- name: Build and push MINOR.latest tag
|
||||
uses: docker/build-push-action@v4
|
||||
uses: docker/build-push-action@v5
|
||||
if: ${{ needs.get_version_meta.outputs.minor_latest == 'True' }}
|
||||
with:
|
||||
file: docker/Dockerfile
|
||||
@@ -106,7 +106,7 @@ jobs:
|
||||
ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ needs.get_version_meta.outputs.major }}.${{ needs.get_version_meta.outputs.minor }}.latest
|
||||
|
||||
- name: Build and push latest tag
|
||||
uses: docker/build-push-action@v4
|
||||
uses: docker/build-push-action@v5
|
||||
if: ${{ needs.get_version_meta.outputs.latest == 'True' }}
|
||||
with:
|
||||
file: docker/Dockerfile
|
||||
|
||||
30
.github/workflows/repository-cleanup.yml
vendored
Normal file
30
.github/workflows/repository-cleanup.yml
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
# **what?**
|
||||
# Cleanup branches left over from automation and testing. Also cleanup
|
||||
# draft releases from release testing.
|
||||
|
||||
# **why?**
|
||||
# The automations are leaving behind branches and releases that clutter
|
||||
# the repository. Sometimes we need them to debug processes so we don't
|
||||
# want them immediately deleted. Running on Saturday to avoid running
|
||||
# at the same time as an actual release to prevent breaking a release
|
||||
# mid-release.
|
||||
|
||||
# **when?**
|
||||
# Mainly on a schedule of 12:00 Saturday.
|
||||
# Manual trigger can also run on demand
|
||||
|
||||
name: Repository Cleanup
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 12 * * SAT' # At 12:00 on Saturday - details in `why` above
|
||||
|
||||
workflow_dispatch: # for manual triggering
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
cleanup-repo:
|
||||
uses: dbt-labs/actions/.github/workflows/repository-cleanup.yml@main
|
||||
secrets: inherit
|
||||
147
CHANGELOG.md
147
CHANGELOG.md
@@ -5,7 +5,64 @@
|
||||
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
|
||||
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)
|
||||
|
||||
## dbt-core 1.7.0-b1 - August 17, 2023
|
||||
## dbt-core 1.7.5 - January 18, 2024
|
||||
|
||||
### Fixes
|
||||
|
||||
- Preserve the value of vars and the --full-refresh flags when using retry. ([#9112](https://github.com/dbt-labs/dbt-core/issues/9112))
|
||||
|
||||
### Contributors
|
||||
- [@peterallenwebb,](https://github.com/peterallenwebb,) ([#9112](https://github.com/dbt-labs/dbt-core/issues/9112))
|
||||
|
||||
|
||||
## dbt-core 1.7.4 - December 14, 2023
|
||||
|
||||
### Features
|
||||
|
||||
- Adds support for parsing conversion metric related properties for the semantic layer. ([#9203](https://github.com/dbt-labs/dbt-core/issues/9203))
|
||||
|
||||
### Fixes
|
||||
|
||||
- Ensure we produce valid jsonschema schemas for manifest, catalog, run-results, and sources ([#8991](https://github.com/dbt-labs/dbt-core/issues/8991))
|
||||
|
||||
### Contributors
|
||||
- [@WilliamDee](https://github.com/WilliamDee) ([#9203](https://github.com/dbt-labs/dbt-core/issues/9203))
|
||||
|
||||
## dbt-core 1.7.3 - November 29, 2023
|
||||
|
||||
### Fixes
|
||||
|
||||
- deps: Lock git packages to commit SHA during resolution ([#9050](https://github.com/dbt-labs/dbt-core/issues/9050))
|
||||
- deps: Use PackageRenderer to read package-lock.json ([#9127](https://github.com/dbt-labs/dbt-core/issues/9127))
|
||||
- Get sources working again in dbt docs generate ([#9119](https://github.com/dbt-labs/dbt-core/issues/9119))
|
||||
|
||||
## dbt-core 1.7.2 - November 16, 2023
|
||||
|
||||
### Features
|
||||
|
||||
- Support setting export configs hierarchically via saved query and project configs ([#8956](https://github.com/dbt-labs/dbt-core/issues/8956))
|
||||
|
||||
### Fixes
|
||||
|
||||
- Fix formatting of tarball information in packages-lock.yml ([#9062](https://github.com/dbt-labs/dbt-core/issues/9062))
|
||||
|
||||
### Under the Hood
|
||||
|
||||
- Treat SystemExit as an interrupt if raised during node execution. ([#n/a](https://github.com/dbt-labs/dbt-core/issues/n/a))
|
||||
|
||||
### Contributors
|
||||
- [@benmosher](https://github.com/benmosher) ([#n/a](https://github.com/dbt-labs/dbt-core/issues/n/a))
|
||||
|
||||
## dbt-core 1.7.1 - November 07, 2023
|
||||
|
||||
### Fixes
|
||||
|
||||
- Fix compilation exception running empty seed file and support new Integer agate data_type ([#8895](https://github.com/dbt-labs/dbt-core/issues/8895))
|
||||
- Update run_results.json from previous versions of dbt to support deferral and rerun from failure ([#9010](https://github.com/dbt-labs/dbt-core/issues/9010))
|
||||
- Use MANIFEST.in to recursively include all jinja templates; fixes issue where some templates were not included in the distribution ([#9016](https://github.com/dbt-labs/dbt-core/issues/9016))
|
||||
- Fix git repository with subdirectory for Deps ([#9000](https://github.com/dbt-labs/dbt-core/issues/9000))
|
||||
|
||||
## dbt-core 1.7.0 - November 02, 2023
|
||||
|
||||
### Breaking Changes
|
||||
|
||||
@@ -13,13 +70,38 @@
|
||||
|
||||
### Features
|
||||
|
||||
- add log file of installed packages via dbt deps ([#6643](https://github.com/dbt-labs/dbt-core/issues/6643))
|
||||
- Enable re-population of metadata vars post-environment change during programmatic invocation ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
|
||||
- Added support to configure a delimiter for a seed file, defaults to comma ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
|
||||
- Allow specification of `create_metric: true` on measures ([#8125](https://github.com/dbt-labs/dbt-core/issues/8125))
|
||||
- Add node attributes related to compilation to run_results.json ([#7519](https://github.com/dbt-labs/dbt-core/issues/7519))
|
||||
- Add --no-inject-ephemeral-ctes flag for `compile` command, for usage by linting. ([#8480](https://github.com/dbt-labs/dbt-core/issues/8480))
|
||||
- Support configuration of semantic models with the addition of enable/disable and group enablement. ([#7968](https://github.com/dbt-labs/dbt-core/issues/7968))
|
||||
- Accept a `dbt-cloud` config in dbt_project.yml ([#8438](https://github.com/dbt-labs/dbt-core/issues/8438))
|
||||
- Support atomic replace in the global replace macro ([#8539](https://github.com/dbt-labs/dbt-core/issues/8539))
|
||||
- Use translate_type on data_type in model.columns in templates by default, remove no op `TYPE_LABELS` ([#8007](https://github.com/dbt-labs/dbt-core/issues/8007))
|
||||
- Add an option to generate static documentation ([#8614](https://github.com/dbt-labs/dbt-core/issues/8614))
|
||||
- Allow setting "access" as a config in addition to as a property ([#8383](https://github.com/dbt-labs/dbt-core/issues/8383))
|
||||
- Loosen typing requirement on renameable/replaceable relations to Iterable to allow adapters more flexibility in registering relation types, include docstrings as suggestions ([#8647](https://github.com/dbt-labs/dbt-core/issues/8647))
|
||||
- Add support for optional label in semantic_models, measures, dimensions and entities. ([#8595](https://github.com/dbt-labs/dbt-core/issues/8595), [#8755](https://github.com/dbt-labs/dbt-core/issues/8755))
|
||||
- Allow adapters to include package logs in dbt standard logging ([#7859](https://github.com/dbt-labs/dbt-core/issues/7859))
|
||||
- Support storing test failures as views ([#6914](https://github.com/dbt-labs/dbt-core/issues/6914))
|
||||
- resolve packages with same git repo and unique subdirectory ([#5374](https://github.com/dbt-labs/dbt-core/issues/5374))
|
||||
- Add new ResourceReport event to record memory/cpu/io metrics ([#8342](https://github.com/dbt-labs/dbt-core/issues/8342))
|
||||
- Adding `date_spine` macro (and supporting macros) from dbt-utils to dbt-core ([#8172](https://github.com/dbt-labs/dbt-core/issues/8172))
|
||||
- Support `fill_nulls_with` and `join_to_timespine` for metric nodes ([#8593](https://github.com/dbt-labs/dbt-core/issues/8593), [#8755](https://github.com/dbt-labs/dbt-core/issues/8755))
|
||||
- Raise a warning when a contracted model has a numeric field without scale defined ([#8183](https://github.com/dbt-labs/dbt-core/issues/8183))
|
||||
- Added support for retrieving partial catalog information from a schema ([#8521](https://github.com/dbt-labs/dbt-core/issues/8521))
|
||||
- Add meta attribute to SemanticModels config ([#8511](https://github.com/dbt-labs/dbt-core/issues/8511))
|
||||
- Selectors with docs generate limits catalog generation ([#6014](https://github.com/dbt-labs/dbt-core/issues/6014))
|
||||
- Allow freshness to be determined via DBMS metadata for supported adapters ([#8704](https://github.com/dbt-labs/dbt-core/issues/8704))
|
||||
- Add support semantic layer SavedQuery node type ([#8594](https://github.com/dbt-labs/dbt-core/issues/8594))
|
||||
- Add exports to SavedQuery spec ([#8892](https://github.com/dbt-labs/dbt-core/issues/8892))
|
||||
|
||||
### Fixes
|
||||
|
||||
- Copy dir during `dbt deps` if symlink fails ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
|
||||
- If --profile specified with dbt-init, create the project with the specified profile ([#6154](https://github.com/dbt-labs/dbt-core/issues/6154))
|
||||
- Fixed double-underline ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
|
||||
- Copy target_schema from config into snapshot node ([#6745](https://github.com/dbt-labs/dbt-core/issues/6745))
|
||||
- Enable converting deprecation warnings to errors ([#8130](https://github.com/dbt-labs/dbt-core/issues/8130))
|
||||
@@ -31,23 +113,59 @@
|
||||
- Fix unbound local variable error in `checked_agg_time_dimension_for_measure` ([#8230](https://github.com/dbt-labs/dbt-core/issues/8230))
|
||||
- Ensure runtime errors are raised for graph runnable tasks (compile, show, run, etc) ([#8166](https://github.com/dbt-labs/dbt-core/issues/8166))
|
||||
- Fix retry not working with log-file-max-bytes ([#8297](https://github.com/dbt-labs/dbt-core/issues/8297))
|
||||
- Add explicit support for integers for the show command ([#8153](https://github.com/dbt-labs/dbt-core/issues/8153))
|
||||
- Detect changes to model access, version, or latest_version in state:modified ([#8189](https://github.com/dbt-labs/dbt-core/issues/8189))
|
||||
- Add connection status into list of statuses for dbt debug ([#8350](https://github.com/dbt-labs/dbt-core/issues/8350))
|
||||
- fix fqn-selection for external versioned models ([#8374](https://github.com/dbt-labs/dbt-core/issues/8374))
|
||||
- Fix: DbtInternalError after model that previously ref'd external model is deleted ([#8375](https://github.com/dbt-labs/dbt-core/issues/8375))
|
||||
- Fix using list command with path selector and project-dir ([#8385](https://github.com/dbt-labs/dbt-core/issues/8385))
|
||||
- Remedy performance regression by only writing run_results.json once. ([#8360](https://github.com/dbt-labs/dbt-core/issues/8360))
|
||||
- Add support for swapping materialized views with tables/views and vice versa ([#8449](https://github.com/dbt-labs/dbt-core/issues/8449))
|
||||
- Turn breaking changes to contracted models into warnings for unversioned models ([#8384](https://github.com/dbt-labs/dbt-core/issues/8384), [#8282](https://github.com/dbt-labs/dbt-core/issues/8282))
|
||||
- Ensure parsing does not break when `window_groupings` is not specified for `non_additive_dimension` ([#8453](https://github.com/dbt-labs/dbt-core/issues/8453))
|
||||
- fix ambiguous reference error for tests and versions when model name is duplicated across packages ([#8327](https://github.com/dbt-labs/dbt-core/issues/8327), [#8493](https://github.com/dbt-labs/dbt-core/issues/8493))
|
||||
- Fix "Internal Error: Expected node <unique-id> not found in manifest" when depends_on set on ModelNodeArgs ([#8506](https://github.com/dbt-labs/dbt-core/issues/8506))
|
||||
- Fix snapshot success message ([#7583](https://github.com/dbt-labs/dbt-core/issues/7583))
|
||||
- Parse the correct schema version from manifest ([#8544](https://github.com/dbt-labs/dbt-core/issues/8544))
|
||||
- make version comparison insensitive to order ([#8571](https://github.com/dbt-labs/dbt-core/issues/8571))
|
||||
- Update metric helper functions to work with new semantic layer metrics ([#8134](https://github.com/dbt-labs/dbt-core/issues/8134))
|
||||
- Disallow cleaning paths outside current working directory ([#8318](https://github.com/dbt-labs/dbt-core/issues/8318))
|
||||
- Warn when --state == --target ([#8160](https://github.com/dbt-labs/dbt-core/issues/8160))
|
||||
- update dbt show to include limit in DWH query ([#8496,](https://github.com/dbt-labs/dbt-core/issues/8496,), [#8417](https://github.com/dbt-labs/dbt-core/issues/8417))
|
||||
- Support quoted parameter list for MultiOption CLI options. ([#8598](https://github.com/dbt-labs/dbt-core/issues/8598))
|
||||
- Support global flags passed in after subcommands ([#6497](https://github.com/dbt-labs/dbt-core/issues/6497))
|
||||
- Lower bound of `8.0.2` for `click` ([#8683](https://github.com/dbt-labs/dbt-core/issues/8683))
|
||||
- Fixes test type edges filter ([#8692](https://github.com/dbt-labs/dbt-core/issues/8692))
|
||||
- semantic models in graph selection ([#8589](https://github.com/dbt-labs/dbt-core/issues/8589))
|
||||
- Support doc blocks in nested semantic model YAML ([#8509](https://github.com/dbt-labs/dbt-core/issues/8509))
|
||||
- avoid double-rendering sql_header in dbt show ([#8739](https://github.com/dbt-labs/dbt-core/issues/8739))
|
||||
- Fix tag selection for projects with semantic models ([#8749](https://github.com/dbt-labs/dbt-core/issues/8749))
|
||||
- Foreign key constraint on incremental model results in Database Error ([#8022](https://github.com/dbt-labs/dbt-core/issues/8022))
|
||||
- Support docs blocks on versioned model column descriptions ([#8540](https://github.com/dbt-labs/dbt-core/issues/8540))
|
||||
- Enable seeds to be handled from stored manifest data ([#6875](https://github.com/dbt-labs/dbt-core/issues/6875))
|
||||
- Override path-like args in dbt retry ([#8682](https://github.com/dbt-labs/dbt-core/issues/8682))
|
||||
- Group updates on unmodified nodes are handled gracefully for state:modified ([#8371](https://github.com/dbt-labs/dbt-core/issues/8371))
|
||||
- Partial parsing fix for adding groups and updating models at the same time ([#8697](https://github.com/dbt-labs/dbt-core/issues/8697))
|
||||
- Fix partial parsing not working for semantic model change ([#8859](https://github.com/dbt-labs/dbt-core/issues/8859))
|
||||
- Rework get_catalog implementation to retain previous adapter interface semantics ([#8846](https://github.com/dbt-labs/dbt-core/issues/8846))
|
||||
- Add back contract enforcement for temporary tables on postgres ([#8857](https://github.com/dbt-labs/dbt-core/issues/8857))
|
||||
- Add version to fqn when version==0 ([#8836](https://github.com/dbt-labs/dbt-core/issues/8836))
|
||||
- Fix cased comparison in catalog-retrieval function. ([#8939](https://github.com/dbt-labs/dbt-core/issues/8939))
|
||||
- Catalog queries now assign the correct type to materialized views ([#8864](https://github.com/dbt-labs/dbt-core/issues/8864))
|
||||
- Make relation filtering None-tolerant for maximal flexibility across adapters. ([#8974](https://github.com/dbt-labs/dbt-core/issues/8974))
|
||||
|
||||
### Docs
|
||||
|
||||
- Corrected spelling of "Partiton" ([dbt-docs/#8100](https://github.com/dbt-labs/dbt-docs/issues/8100))
|
||||
- Remove static SQL codeblock for metrics ([dbt-docs/#436](https://github.com/dbt-labs/dbt-docs/issues/436))
|
||||
- fixed comment util.py ([dbt-docs/#None](https://github.com/dbt-labs/dbt-docs/issues/None))
|
||||
- Fix newline escapes and improve formatting in docker README ([dbt-docs/#8211](https://github.com/dbt-labs/dbt-docs/issues/8211))
|
||||
- Display contract and column constraints on the model page ([dbt-docs/#433](https://github.com/dbt-labs/dbt-docs/issues/433))
|
||||
- Display semantic model details in docs ([dbt-docs/#431](https://github.com/dbt-labs/dbt-docs/issues/431))
|
||||
|
||||
### Under the Hood
|
||||
|
||||
- Switch from hologram to mashumaro jsonschema ([#8426](https://github.com/dbt-labs/dbt-core/issues/8426))
|
||||
- Refactor flaky test pp_versioned_models ([#7781](https://github.com/dbt-labs/dbt-core/issues/7781))
|
||||
- format exception from dbtPlugin.initialize ([#8152](https://github.com/dbt-labs/dbt-core/issues/8152))
|
||||
- A way to control maxBytes for a single dbt.log file ([#8199](https://github.com/dbt-labs/dbt-core/issues/8199))
|
||||
@@ -58,6 +176,20 @@
|
||||
- Add return values to a number of functions for mypy ([#8389](https://github.com/dbt-labs/dbt-core/issues/8389))
|
||||
- Fix mypy warnings for ManifestLoader.load() ([#8401](https://github.com/dbt-labs/dbt-core/issues/8401))
|
||||
- Use python version 3.10.7 in Docker image. ([#8444](https://github.com/dbt-labs/dbt-core/issues/8444))
|
||||
- Re-organize jinja macros: relation-specific in /macros/adapters/relations/<relation>, relation agnostic in /macros/relations ([#8449](https://github.com/dbt-labs/dbt-core/issues/8449))
|
||||
- Update typing to meet mypy standards ([#8396](https://github.com/dbt-labs/dbt-core/issues/8396))
|
||||
- Mypy errors - adapters/factory.py ([#8387](https://github.com/dbt-labs/dbt-core/issues/8387))
|
||||
- Added more type annotations. ([#8537](https://github.com/dbt-labs/dbt-core/issues/8537))
|
||||
- Audit potential circular dependencies ([#8349](https://github.com/dbt-labs/dbt-core/issues/8349))
|
||||
- Add functional test for advanced ref override ([#8566](https://github.com/dbt-labs/dbt-core/issues/8566))
|
||||
- Add typing to __init__ in base.py ([#8398](https://github.com/dbt-labs/dbt-core/issues/8398))
|
||||
- Fix untyped functions in task/runnable.py (mypy warning) ([#8402](https://github.com/dbt-labs/dbt-core/issues/8402))
|
||||
- add a test for ephemeral cte injection ([#8225](https://github.com/dbt-labs/dbt-core/issues/8225))
|
||||
- Fix test_numeric_values to look for more specific strings ([#8470](https://github.com/dbt-labs/dbt-core/issues/8470))
|
||||
- Pin types-requests<2.31.0 in `dev-requirements.txt` ([#8789](https://github.com/dbt-labs/dbt-core/issues/8789))
|
||||
- Add warning_tag to UnversionedBreakingChange ([#8827](https://github.com/dbt-labs/dbt-core/issues/8827))
|
||||
- Update v10 manifest schema to match 1.6 for testing schema compatibility ([#8835](https://github.com/dbt-labs/dbt-core/issues/8835))
|
||||
- Add a no-op runner for Saved Query ([#8893](https://github.com/dbt-labs/dbt-core/issues/8893))
|
||||
|
||||
### Dependencies
|
||||
|
||||
@@ -66,16 +198,27 @@
|
||||
- Update pin for click<9 ([#8232](https://github.com/dbt-labs/dbt-core/pull/8232))
|
||||
- Add upper bound to sqlparse pin of <0.5 ([#8236](https://github.com/dbt-labs/dbt-core/pull/8236))
|
||||
- Support dbt-semantic-interfaces 0.2.0 ([#8250](https://github.com/dbt-labs/dbt-core/pull/8250))
|
||||
- Bump docker/build-push-action from 4 to 5 ([#8783](https://github.com/dbt-labs/dbt-core/pull/8783))
|
||||
- Upgrade dbt-semantic-interfaces dep to 0.3.0 ([#8819](https://github.com/dbt-labs/dbt-core/pull/8819))
|
||||
- Begin using DSI 0.4.x ([#8892](https://github.com/dbt-labs/dbt-core/pull/8892))
|
||||
|
||||
### Contributors
|
||||
- [@anjutiwari](https://github.com/anjutiwari) ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
|
||||
- [@benmosher](https://github.com/benmosher) ([#8480](https://github.com/dbt-labs/dbt-core/issues/8480))
|
||||
- [@d-kaneshiro](https://github.com/d-kaneshiro) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
|
||||
- [@dave-connors-3](https://github.com/dave-connors-3) ([#8153](https://github.com/dbt-labs/dbt-core/issues/8153), [#8589](https://github.com/dbt-labs/dbt-core/issues/8589))
|
||||
- [@dylan-murray](https://github.com/dylan-murray) ([#8683](https://github.com/dbt-labs/dbt-core/issues/8683))
|
||||
- [@ezraerb](https://github.com/ezraerb) ([#6154](https://github.com/dbt-labs/dbt-core/issues/6154))
|
||||
- [@gem7318](https://github.com/gem7318) ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
|
||||
- [@jamezrin](https://github.com/jamezrin) ([#8211](https://github.com/dbt-labs/dbt-core/issues/8211))
|
||||
- [@jusbaldw](https://github.com/jusbaldw) ([#6643](https://github.com/dbt-labs/dbt-core/issues/6643))
|
||||
- [@lllong33](https://github.com/lllong33) ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
|
||||
- [@marcodamore](https://github.com/marcodamore) ([#436](https://github.com/dbt-labs/dbt-core/issues/436))
|
||||
- [@mescanne](https://github.com/mescanne) ([#8614](https://github.com/dbt-labs/dbt-core/issues/8614))
|
||||
- [@pgoslatara](https://github.com/pgoslatara) ([#8100](https://github.com/dbt-labs/dbt-core/issues/8100))
|
||||
- [@philippeboyd](https://github.com/philippeboyd) ([#5374](https://github.com/dbt-labs/dbt-core/issues/5374))
|
||||
- [@ramonvermeulen](https://github.com/ramonvermeulen) ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
|
||||
|
||||
- [@renanleme](https://github.com/renanleme) ([#8692](https://github.com/dbt-labs/dbt-core/issues/8692))
|
||||
|
||||
## Previous Releases
|
||||
|
||||
|
||||
@@ -6,4 +6,8 @@ coverage:
|
||||
project:
|
||||
default:
|
||||
target: auto
|
||||
threshold: 0.01% # Reduce noise by ignoring rounding errors in coverage drops
|
||||
threshold: 0.1% # Reduce noise by ignoring rounding errors in coverage drops
|
||||
patch:
|
||||
default:
|
||||
target: auto
|
||||
threshold: 80%
|
||||
|
||||
@@ -7,12 +7,12 @@ from dbt.exceptions import DbtRuntimeError
|
||||
|
||||
@dataclass
|
||||
class Column:
|
||||
# Note: This is automatically used by contract code
|
||||
# No-op conversions (INTEGER => INT) have been removed.
|
||||
# Any adapter that wants to take advantage of "translate_type"
|
||||
# should create a ClassVar with the appropriate conversions.
|
||||
TYPE_LABELS: ClassVar[Dict[str, str]] = {
|
||||
"STRING": "TEXT",
|
||||
"TIMESTAMP": "TIMESTAMP",
|
||||
"FLOAT": "FLOAT",
|
||||
"INTEGER": "INT",
|
||||
"BOOLEAN": "BOOLEAN",
|
||||
}
|
||||
column: str
|
||||
dtype: str
|
||||
|
||||
@@ -72,7 +72,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
|
||||
TYPE: str = NotImplemented
|
||||
|
||||
def __init__(self, profile: AdapterRequiredConfig):
|
||||
def __init__(self, profile: AdapterRequiredConfig) -> None:
|
||||
self.profile = profile
|
||||
self.thread_connections: Dict[Hashable, Connection] = {}
|
||||
self.lock: RLock = flags.MP_CONTEXT.RLock()
|
||||
|
||||
@@ -17,9 +17,11 @@ from typing import (
|
||||
Set,
|
||||
Tuple,
|
||||
Type,
|
||||
TypedDict,
|
||||
Union,
|
||||
)
|
||||
|
||||
from dbt.adapters.capability import Capability, CapabilityDict
|
||||
from dbt.contracts.graph.nodes import ColumnLevelConstraint, ConstraintType, ModelLevelConstraint
|
||||
|
||||
import agate
|
||||
@@ -44,7 +46,13 @@ from dbt.exceptions import (
|
||||
)
|
||||
|
||||
from dbt.adapters.protocol import AdapterConfig
|
||||
from dbt.clients.agate_helper import empty_table, merge_tables, table_from_rows
|
||||
from dbt.clients.agate_helper import (
|
||||
empty_table,
|
||||
get_column_value_uncased,
|
||||
merge_tables,
|
||||
table_from_rows,
|
||||
Integer,
|
||||
)
|
||||
from dbt.clients.jinja import MacroGenerator
|
||||
from dbt.contracts.graph.manifest import Manifest, MacroManifest
|
||||
from dbt.contracts.graph.nodes import ResultNode
|
||||
@@ -74,7 +82,9 @@ from dbt.adapters.cache import RelationsCache, _make_ref_key_dict
|
||||
from dbt import deprecations
|
||||
|
||||
GET_CATALOG_MACRO_NAME = "get_catalog"
|
||||
GET_CATALOG_RELATIONS_MACRO_NAME = "get_catalog_relations"
|
||||
FRESHNESS_MACRO_NAME = "collect_freshness"
|
||||
GET_RELATION_LAST_MODIFIED_MACRO_NAME = "get_relation_last_modified"
|
||||
|
||||
|
||||
class ConstraintSupport(str, Enum):
|
||||
@@ -109,7 +119,7 @@ def _catalog_filter_schemas(manifest: Manifest) -> Callable[[agate.Row], bool]:
|
||||
return test
|
||||
|
||||
|
||||
def _utc(dt: Optional[datetime], source: BaseRelation, field_name: str) -> datetime:
|
||||
def _utc(dt: Optional[datetime], source: Optional[BaseRelation], field_name: str) -> datetime:
|
||||
"""If dt has a timezone, return a new datetime that's in UTC. Otherwise,
|
||||
assume the datetime is already for UTC and add the timezone.
|
||||
"""
|
||||
@@ -161,6 +171,12 @@ class PythonJobHelper:
|
||||
raise NotImplementedError("PythonJobHelper submit function is not implemented yet")
|
||||
|
||||
|
||||
class FreshnessResponse(TypedDict):
|
||||
max_loaded_at: datetime
|
||||
snapshotted_at: datetime
|
||||
age: float # age in seconds
|
||||
|
||||
|
||||
class BaseAdapter(metaclass=AdapterMeta):
|
||||
"""The BaseAdapter provides an abstract base class for adapters.
|
||||
|
||||
@@ -222,7 +238,11 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
ConstraintType.foreign_key: ConstraintSupport.ENFORCED,
|
||||
}
|
||||
|
||||
def __init__(self, config):
|
||||
# This static member variable can be overriden in concrete adapter
|
||||
# implementations to indicate adapter support for optional capabilities.
|
||||
_capabilities = CapabilityDict({})
|
||||
|
||||
def __init__(self, config) -> None:
|
||||
self.config = config
|
||||
self.cache = RelationsCache()
|
||||
self.connections = self.ConnectionManager(config)
|
||||
@@ -415,7 +435,30 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
lowercase strings.
|
||||
"""
|
||||
info_schema_name_map = SchemaSearchMap()
|
||||
nodes: Iterator[ResultNode] = chain(
|
||||
relations = self._get_catalog_relations(manifest)
|
||||
for relation in relations:
|
||||
info_schema_name_map.add(relation)
|
||||
# result is a map whose keys are information_schema Relations without
|
||||
# identifiers that have appropriate database prefixes, and whose values
|
||||
# are sets of lowercase schema names that are valid members of those
|
||||
# databases
|
||||
return info_schema_name_map
|
||||
|
||||
def _get_catalog_relations_by_info_schema(
|
||||
self, relations
|
||||
) -> Dict[InformationSchema, List[BaseRelation]]:
|
||||
relations_by_info_schema: Dict[InformationSchema, List[BaseRelation]] = dict()
|
||||
for relation in relations:
|
||||
info_schema = relation.information_schema_only()
|
||||
if info_schema not in relations_by_info_schema:
|
||||
relations_by_info_schema[info_schema] = []
|
||||
relations_by_info_schema[info_schema].append(relation)
|
||||
|
||||
return relations_by_info_schema
|
||||
|
||||
def _get_catalog_relations(self, manifest: Manifest) -> List[BaseRelation]:
|
||||
|
||||
nodes = chain(
|
||||
[
|
||||
node
|
||||
for node in manifest.nodes.values()
|
||||
@@ -423,14 +466,9 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
],
|
||||
manifest.sources.values(),
|
||||
)
|
||||
for node in nodes:
|
||||
relation = self.Relation.create_from(self.config, node)
|
||||
info_schema_name_map.add(relation)
|
||||
# result is a map whose keys are information_schema Relations without
|
||||
# identifiers that have appropriate database prefixes, and whose values
|
||||
# are sets of lowercase schema names that are valid members of those
|
||||
# databases
|
||||
return info_schema_name_map
|
||||
|
||||
relations = [self.Relation.create_from(self.config, n) for n in nodes]
|
||||
return relations
|
||||
|
||||
def _relations_cache_for_schemas(
|
||||
self, manifest: Manifest, cache_schemas: Optional[Set[BaseRelation]] = None
|
||||
@@ -925,6 +963,17 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
"""
|
||||
raise NotImplementedError("`convert_number_type` is not implemented for this adapter!")
|
||||
|
||||
@classmethod
|
||||
def convert_integer_type(cls, agate_table: agate.Table, col_idx: int) -> str:
|
||||
"""Return the type in the database that best maps to the agate.Number
|
||||
type for the given agate table and column index.
|
||||
|
||||
:param agate_table: The table
|
||||
:param col_idx: The index into the agate table for the column.
|
||||
:return: The name of the type in the database
|
||||
"""
|
||||
return "integer"
|
||||
|
||||
@classmethod
|
||||
@abc.abstractmethod
|
||||
def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
|
||||
@@ -982,6 +1031,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
def convert_agate_type(cls, agate_table: agate.Table, col_idx: int) -> Optional[str]:
|
||||
agate_type: Type = agate_table.column_types[col_idx]
|
||||
conversions: List[Tuple[Type, Callable[..., str]]] = [
|
||||
(Integer, cls.convert_integer_type),
|
||||
(agate.Text, cls.convert_text_type),
|
||||
(agate.Number, cls.convert_number_type),
|
||||
(agate.Boolean, cls.convert_boolean_type),
|
||||
@@ -1093,25 +1143,108 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
results = self._catalog_filter_table(table, manifest) # type: ignore[arg-type]
|
||||
return results
|
||||
|
||||
def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]:
|
||||
schema_map = self._get_catalog_schemas(manifest)
|
||||
def _get_one_catalog_by_relations(
|
||||
self,
|
||||
information_schema: InformationSchema,
|
||||
relations: List[BaseRelation],
|
||||
manifest: Manifest,
|
||||
) -> agate.Table:
|
||||
|
||||
kwargs = {
|
||||
"information_schema": information_schema,
|
||||
"relations": relations,
|
||||
}
|
||||
table = self.execute_macro(
|
||||
GET_CATALOG_RELATIONS_MACRO_NAME,
|
||||
kwargs=kwargs,
|
||||
# pass in the full manifest, so we get any local project
|
||||
# overrides
|
||||
manifest=manifest,
|
||||
)
|
||||
|
||||
results = self._catalog_filter_table(table, manifest) # type: ignore[arg-type]
|
||||
return results
|
||||
|
||||
def get_filtered_catalog(
|
||||
self, manifest: Manifest, relations: Optional[Set[BaseRelation]] = None
|
||||
):
|
||||
catalogs: agate.Table
|
||||
if (
|
||||
relations is None
|
||||
or len(relations) > 100
|
||||
or not self.supports(Capability.SchemaMetadataByRelations)
|
||||
):
|
||||
# Do it the traditional way. We get the full catalog.
|
||||
catalogs, exceptions = self.get_catalog(manifest)
|
||||
else:
|
||||
# Do it the new way. We try to save time by selecting information
|
||||
# only for the exact set of relations we are interested in.
|
||||
catalogs, exceptions = self.get_catalog_by_relations(manifest, relations)
|
||||
|
||||
if relations and catalogs:
|
||||
relation_map = {
|
||||
(
|
||||
r.database.casefold() if r.database else None,
|
||||
r.schema.casefold() if r.schema else None,
|
||||
r.identifier.casefold() if r.identifier else None,
|
||||
)
|
||||
for r in relations
|
||||
}
|
||||
|
||||
def in_map(row: agate.Row):
|
||||
d = _expect_row_value("table_database", row)
|
||||
s = _expect_row_value("table_schema", row)
|
||||
i = _expect_row_value("table_name", row)
|
||||
d = d.casefold() if d is not None else None
|
||||
s = s.casefold() if s is not None else None
|
||||
i = i.casefold() if i is not None else None
|
||||
return (d, s, i) in relation_map
|
||||
|
||||
catalogs = catalogs.where(in_map)
|
||||
|
||||
return catalogs, exceptions
|
||||
|
||||
def row_matches_relation(self, row: agate.Row, relations: Set[BaseRelation]):
|
||||
pass
|
||||
|
||||
def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]:
|
||||
with executor(self.config) as tpe:
|
||||
futures: List[Future[agate.Table]] = []
|
||||
schema_map: SchemaSearchMap = self._get_catalog_schemas(manifest)
|
||||
for info, schemas in schema_map.items():
|
||||
if len(schemas) == 0:
|
||||
continue
|
||||
name = ".".join([str(info.database), "information_schema"])
|
||||
|
||||
fut = tpe.submit_connected(
|
||||
self, name, self._get_one_catalog, info, schemas, manifest
|
||||
)
|
||||
futures.append(fut)
|
||||
|
||||
catalogs, exceptions = catch_as_completed(futures)
|
||||
|
||||
catalogs, exceptions = catch_as_completed(futures)
|
||||
return catalogs, exceptions
|
||||
|
||||
def get_catalog_by_relations(
|
||||
self, manifest: Manifest, relations: Set[BaseRelation]
|
||||
) -> Tuple[agate.Table, List[Exception]]:
|
||||
with executor(self.config) as tpe:
|
||||
futures: List[Future[agate.Table]] = []
|
||||
relations_by_schema = self._get_catalog_relations_by_info_schema(relations)
|
||||
for info_schema in relations_by_schema:
|
||||
name = ".".join([str(info_schema.database), "information_schema"])
|
||||
relations = set(relations_by_schema[info_schema])
|
||||
fut = tpe.submit_connected(
|
||||
self,
|
||||
name,
|
||||
self._get_one_catalog_by_relations,
|
||||
info_schema,
|
||||
relations,
|
||||
manifest,
|
||||
)
|
||||
futures.append(fut)
|
||||
|
||||
catalogs, exceptions = catch_as_completed(futures)
|
||||
return catalogs, exceptions
|
||||
|
||||
def cancel_open_connections(self):
|
||||
"""Cancel all open connections."""
|
||||
return self.connections.cancel_open()
|
||||
@@ -1122,7 +1255,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
loaded_at_field: str,
|
||||
filter: Optional[str],
|
||||
manifest: Optional[Manifest] = None,
|
||||
) -> Tuple[Optional[AdapterResponse], Dict[str, Any]]:
|
||||
) -> Tuple[Optional[AdapterResponse], FreshnessResponse]:
|
||||
"""Calculate the freshness of sources in dbt, and return it"""
|
||||
kwargs: Dict[str, Any] = {
|
||||
"source": source,
|
||||
@@ -1157,13 +1290,52 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
|
||||
snapshotted_at = _utc(table[0][1], source, loaded_at_field)
|
||||
age = (snapshotted_at - max_loaded_at).total_seconds()
|
||||
freshness = {
|
||||
freshness: FreshnessResponse = {
|
||||
"max_loaded_at": max_loaded_at,
|
||||
"snapshotted_at": snapshotted_at,
|
||||
"age": age,
|
||||
}
|
||||
return adapter_response, freshness
|
||||
|
||||
def calculate_freshness_from_metadata(
|
||||
self,
|
||||
source: BaseRelation,
|
||||
manifest: Optional[Manifest] = None,
|
||||
) -> Tuple[Optional[AdapterResponse], FreshnessResponse]:
|
||||
kwargs: Dict[str, Any] = {
|
||||
"information_schema": source.information_schema_only(),
|
||||
"relations": [source],
|
||||
}
|
||||
result = self.execute_macro(
|
||||
GET_RELATION_LAST_MODIFIED_MACRO_NAME, kwargs=kwargs, manifest=manifest
|
||||
)
|
||||
adapter_response, table = result.response, result.table # type: ignore[attr-defined]
|
||||
|
||||
try:
|
||||
row = table[0]
|
||||
last_modified_val = get_column_value_uncased("last_modified", row)
|
||||
snapshotted_at_val = get_column_value_uncased("snapshotted_at", row)
|
||||
except Exception:
|
||||
raise MacroResultError(GET_RELATION_LAST_MODIFIED_MACRO_NAME, table)
|
||||
|
||||
if last_modified_val is None:
|
||||
# Interpret missing value as "infinitely long ago"
|
||||
max_loaded_at = datetime(1, 1, 1, 0, 0, 0, tzinfo=pytz.UTC)
|
||||
else:
|
||||
max_loaded_at = _utc(last_modified_val, None, "last_modified")
|
||||
|
||||
snapshotted_at = _utc(snapshotted_at_val, None, "snapshotted_at")
|
||||
|
||||
age = (snapshotted_at - max_loaded_at).total_seconds()
|
||||
|
||||
freshness: FreshnessResponse = {
|
||||
"max_loaded_at": max_loaded_at,
|
||||
"snapshotted_at": snapshotted_at,
|
||||
"age": age,
|
||||
}
|
||||
|
||||
return adapter_response, freshness
|
||||
|
||||
def pre_model_hook(self, config: Mapping[str, Any]) -> Any:
|
||||
"""A hook for running some operation before the model materialization
|
||||
runs. The hook can assume it has a connection available.
|
||||
@@ -1437,6 +1609,14 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
else:
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def capabilities(cls) -> CapabilityDict:
|
||||
return cls._capabilities
|
||||
|
||||
@classmethod
|
||||
def supports(cls, capability: Capability) -> bool:
|
||||
return bool(cls.capabilities()[capability])
|
||||
|
||||
|
||||
COLUMNS_EQUAL_SQL = """
|
||||
with diff_count as (
|
||||
|
||||
@@ -93,7 +93,7 @@ class AdapterMeta(abc.ABCMeta):
|
||||
_available_: FrozenSet[str]
|
||||
_parse_replacements_: Dict[str, Callable]
|
||||
|
||||
def __new__(mcls, name, bases, namespace, **kwargs):
|
||||
def __new__(mcls, name, bases, namespace, **kwargs) -> "AdapterMeta":
|
||||
# mypy does not like the `**kwargs`. But `ABCMeta` itself takes
|
||||
# `**kwargs` in its argspec here (and passes them to `type.__new__`.
|
||||
# I'm not sure there is any benefit to it after poking around a bit,
|
||||
|
||||
@@ -29,7 +29,7 @@ class AdapterPlugin:
|
||||
credentials: Type[Credentials],
|
||||
include_path: str,
|
||||
dependencies: Optional[List[str]] = None,
|
||||
):
|
||||
) -> None:
|
||||
|
||||
self.adapter: Type[AdapterProtocol] = adapter
|
||||
self.credentials: Type[Credentials] = credentials
|
||||
|
||||
@@ -11,7 +11,7 @@ from dbt.exceptions import DbtRuntimeError
|
||||
|
||||
|
||||
class NodeWrapper:
|
||||
def __init__(self, node):
|
||||
def __init__(self, node) -> None:
|
||||
self._inner_node = node
|
||||
|
||||
def __getattr__(self, name):
|
||||
@@ -57,7 +57,7 @@ QueryStringFunc = Callable[[str, Optional[NodeWrapper]], str]
|
||||
|
||||
|
||||
class MacroQueryStringSetter:
|
||||
def __init__(self, config: AdapterRequiredConfig, manifest: Manifest):
|
||||
def __init__(self, config: AdapterRequiredConfig, manifest: Manifest) -> None:
|
||||
self.manifest = manifest
|
||||
self.config = config
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from collections.abc import Hashable
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set
|
||||
from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set, Union, FrozenSet
|
||||
|
||||
from dbt.contracts.graph.nodes import SourceDefinition, ManifestNode, ResultNode, ParsedNode
|
||||
from dbt.contracts.relation import (
|
||||
@@ -23,6 +23,7 @@ import dbt.exceptions
|
||||
|
||||
|
||||
Self = TypeVar("Self", bound="BaseRelation")
|
||||
SerializableIterable = Union[Tuple, FrozenSet]
|
||||
|
||||
|
||||
@dataclass(frozen=True, eq=False, repr=False)
|
||||
@@ -36,6 +37,18 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
quote_policy: Policy = field(default_factory=lambda: Policy())
|
||||
dbt_created: bool = False
|
||||
|
||||
# register relation types that can be renamed for the purpose of replacing relations using stages and backups
|
||||
# adding a relation type here also requires defining the associated rename macro
|
||||
# e.g. adding RelationType.View in dbt-postgres requires that you define:
|
||||
# include/postgres/macros/relations/view/rename.sql::postgres__get_rename_view_sql()
|
||||
renameable_relations: SerializableIterable = ()
|
||||
|
||||
# register relation types that are atomically replaceable, e.g. they have "create or replace" syntax
|
||||
# adding a relation type here also requires defining the associated replace macro
|
||||
# e.g. adding RelationType.View in dbt-postgres requires that you define:
|
||||
# include/postgres/macros/relations/view/replace.sql::postgres__get_replace_view_sql()
|
||||
replaceable_relations: SerializableIterable = ()
|
||||
|
||||
def _is_exactish_match(self, field: ComponentName, value: str) -> bool:
|
||||
if self.dbt_created and self.quote_policy.get_part(field) is False:
|
||||
return self.path.get_lowered_part(field) == value.lower()
|
||||
@@ -169,7 +182,6 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
return self.include(identifier=False).replace_path(identifier=None)
|
||||
|
||||
def _render_iterator(self) -> Iterator[Tuple[Optional[ComponentName], Optional[str]]]:
|
||||
|
||||
for key in ComponentName:
|
||||
path_part: Optional[str] = None
|
||||
if self.include_policy.get_part(key):
|
||||
@@ -286,6 +298,14 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
)
|
||||
return cls.from_dict(kwargs)
|
||||
|
||||
@property
|
||||
def can_be_renamed(self) -> bool:
|
||||
return self.type in self.renameable_relations
|
||||
|
||||
@property
|
||||
def can_be_replaced(self) -> bool:
|
||||
return self.type in self.replaceable_relations
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "<{} {}>".format(self.__class__.__name__, self.render())
|
||||
|
||||
@@ -439,11 +459,11 @@ class SchemaSearchMap(Dict[InformationSchema, Set[Optional[str]]]):
|
||||
self[key].add(schema)
|
||||
|
||||
def search(self) -> Iterator[Tuple[InformationSchema, Optional[str]]]:
|
||||
for information_schema_name, schemas in self.items():
|
||||
for information_schema, schemas in self.items():
|
||||
for schema in schemas:
|
||||
yield information_schema_name, schema
|
||||
yield information_schema, schema
|
||||
|
||||
def flatten(self, allow_multiple_databases: bool = False):
|
||||
def flatten(self, allow_multiple_databases: bool = False) -> "SchemaSearchMap":
|
||||
new = self.__class__()
|
||||
|
||||
# make sure we don't have multiple databases if allow_multiple_databases is set to False
|
||||
|
||||
@@ -38,8 +38,8 @@ class _CachedRelation:
|
||||
:attr BaseRelation inner: The underlying dbt relation.
|
||||
"""
|
||||
|
||||
def __init__(self, inner):
|
||||
self.referenced_by = {}
|
||||
def __init__(self, inner) -> None:
|
||||
self.referenced_by: Dict[_ReferenceKey, _CachedRelation] = {}
|
||||
self.inner = inner
|
||||
|
||||
def __str__(self) -> str:
|
||||
|
||||
52
core/dbt/adapters/capability.py
Normal file
52
core/dbt/adapters/capability.py
Normal file
@@ -0,0 +1,52 @@
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import Optional, DefaultDict, Mapping
|
||||
|
||||
|
||||
class Capability(str, Enum):
|
||||
"""Enumeration of optional adapter features which can be probed using BaseAdapter.has_feature()"""
|
||||
|
||||
SchemaMetadataByRelations = "SchemaMetadataByRelations"
|
||||
"""Indicates efficient support for retrieving schema metadata for a list of relations, rather than always retrieving
|
||||
all the relations in a schema."""
|
||||
|
||||
TableLastModifiedMetadata = "TableLastModifiedMetadata"
|
||||
"""Indicates support for determining the time of the last table modification by querying database metadata."""
|
||||
|
||||
|
||||
class Support(str, Enum):
|
||||
Unknown = "Unknown"
|
||||
"""The adapter has not declared whether this capability is a feature of the underlying DBMS."""
|
||||
|
||||
Unsupported = "Unsupported"
|
||||
"""This capability is not possible with the underlying DBMS, so the adapter does not implement related macros."""
|
||||
|
||||
NotImplemented = "NotImplemented"
|
||||
"""This capability is available in the underlying DBMS, but support has not yet been implemented in the adapter."""
|
||||
|
||||
Versioned = "Versioned"
|
||||
"""Some versions of the DBMS supported by the adapter support this capability and the adapter has implemented any
|
||||
macros needed to use it."""
|
||||
|
||||
Full = "Full"
|
||||
"""All versions of the DBMS supported by the adapter support this capability and the adapter has implemented any
|
||||
macros needed to use it."""
|
||||
|
||||
|
||||
@dataclass
|
||||
class CapabilitySupport:
|
||||
support: Support
|
||||
first_version: Optional[str] = None
|
||||
|
||||
def __bool__(self):
|
||||
return self.support == Support.Versioned or self.support == Support.Full
|
||||
|
||||
|
||||
class CapabilityDict(DefaultDict[Capability, CapabilitySupport]):
|
||||
def __init__(self, vals: Mapping[Capability, CapabilitySupport]):
|
||||
super().__init__(self._default)
|
||||
self.update(vals)
|
||||
|
||||
@staticmethod
|
||||
def _default():
|
||||
return CapabilitySupport(support=Support.Unknown)
|
||||
@@ -19,7 +19,7 @@ Adapter = AdapterProtocol
|
||||
|
||||
|
||||
class AdapterContainer:
|
||||
def __init__(self):
|
||||
def __init__(self) -> None:
|
||||
self.lock = threading.Lock()
|
||||
self.adapters: Dict[str, Adapter] = {}
|
||||
self.plugins: Dict[str, AdapterPlugin] = {}
|
||||
|
||||
@@ -90,7 +90,7 @@ class AdapterProtocol( # type: ignore[misc]
|
||||
ConnectionManager: Type[ConnectionManager_T]
|
||||
connections: ConnectionManager_T
|
||||
|
||||
def __init__(self, config: AdapterRequiredConfig):
|
||||
def __init__(self, config: AdapterRequiredConfig) -> None:
|
||||
...
|
||||
|
||||
@classmethod
|
||||
|
||||
@@ -75,6 +75,10 @@ class SQLAdapter(BaseAdapter):
|
||||
decimals = agate_table.aggregate(agate.MaxPrecision(col_idx)) # type: ignore[attr-defined]
|
||||
return "float8" if decimals else "integer"
|
||||
|
||||
@classmethod
|
||||
def convert_integer_type(cls, agate_table: agate.Table, col_idx: int) -> str:
|
||||
return "integer"
|
||||
|
||||
@classmethod
|
||||
def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
|
||||
return "boolean"
|
||||
|
||||
@@ -24,6 +24,7 @@ if os.name != "nt":
|
||||
FLAGS_DEFAULTS = {
|
||||
"INDIRECT_SELECTION": "eager",
|
||||
"TARGET_PATH": None,
|
||||
"WARN_ERROR": None,
|
||||
# Cli args without user_config or env var option.
|
||||
"FULL_REFRESH": False,
|
||||
"STRICT_MODE": False,
|
||||
@@ -57,7 +58,7 @@ def args_to_context(args: List[str]) -> Context:
|
||||
from dbt.cli.main import cli
|
||||
|
||||
cli_ctx = cli.make_context(cli.name, args)
|
||||
# Split args if they're a comma seperated string.
|
||||
# Split args if they're a comma separated string.
|
||||
if len(args) == 1 and "," in args[0]:
|
||||
args = args[0].split(",")
|
||||
sub_command_name, sub_command, args = cli.resolve_command(cli_ctx, args)
|
||||
@@ -78,7 +79,6 @@ class Flags:
|
||||
def __init__(
|
||||
self, ctx: Optional[Context] = None, user_config: Optional[UserConfig] = None
|
||||
) -> None:
|
||||
|
||||
# Set the default flags.
|
||||
for key, value in FLAGS_DEFAULTS.items():
|
||||
object.__setattr__(self, key, value)
|
||||
@@ -120,7 +120,6 @@ class Flags:
|
||||
# respected over DBT_PRINT or --print.
|
||||
new_name: Union[str, None] = None
|
||||
if param_name in DEPRECATED_PARAMS:
|
||||
|
||||
# Deprecated env vars can only be set via env var.
|
||||
# We use the deprecated option in click to serialize the value
|
||||
# from the env var string.
|
||||
@@ -315,7 +314,6 @@ def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandPar
|
||||
default_args = set([x.lower() for x in FLAGS_DEFAULTS.keys()])
|
||||
|
||||
res = command.to_list()
|
||||
|
||||
for k, v in args_dict.items():
|
||||
k = k.lower()
|
||||
# if a "which" value exists in the args dict, it should match the command provided
|
||||
@@ -327,7 +325,9 @@ def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandPar
|
||||
continue
|
||||
|
||||
# param was assigned from defaults and should not be included
|
||||
if k not in (cmd_args | prnt_args) - default_args:
|
||||
if k not in (cmd_args | prnt_args) or (
|
||||
k in default_args and v == FLAGS_DEFAULTS[k.upper()]
|
||||
):
|
||||
continue
|
||||
|
||||
# if the param is in parent args, it should come before the arg name
|
||||
@@ -340,6 +340,10 @@ def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandPar
|
||||
|
||||
spinal_cased = k.replace("_", "-")
|
||||
|
||||
# MultiOption flags come back as lists, but we want to pass them as space separated strings
|
||||
if isinstance(v, list):
|
||||
v = " ".join(v)
|
||||
|
||||
if k == "macro" and command == CliCommand.RUN_OPERATION:
|
||||
add_fn(v)
|
||||
# None is a Singleton, False is a Flyweight, only one instance of each.
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import functools
|
||||
from copy import copy
|
||||
from dataclasses import dataclass
|
||||
from typing import Callable, List, Optional, Union
|
||||
@@ -64,7 +65,7 @@ class dbtRunner:
|
||||
self,
|
||||
manifest: Optional[Manifest] = None,
|
||||
callbacks: Optional[List[Callable[[EventMsg], None]]] = None,
|
||||
):
|
||||
) -> None:
|
||||
self.manifest = manifest
|
||||
|
||||
if callbacks is None:
|
||||
@@ -118,6 +119,44 @@ class dbtRunner:
|
||||
)
|
||||
|
||||
|
||||
# approach from https://github.com/pallets/click/issues/108#issuecomment-280489786
|
||||
def global_flags(func):
|
||||
@p.cache_selected_only
|
||||
@p.debug
|
||||
@p.deprecated_print
|
||||
@p.enable_legacy_logger
|
||||
@p.fail_fast
|
||||
@p.log_cache_events
|
||||
@p.log_file_max_bytes
|
||||
@p.log_format_file
|
||||
@p.log_level
|
||||
@p.log_level_file
|
||||
@p.log_path
|
||||
@p.macro_debugging
|
||||
@p.partial_parse
|
||||
@p.partial_parse_file_path
|
||||
@p.partial_parse_file_diff
|
||||
@p.populate_cache
|
||||
@p.print
|
||||
@p.printer_width
|
||||
@p.quiet
|
||||
@p.record_timing_info
|
||||
@p.send_anonymous_usage_stats
|
||||
@p.single_threaded
|
||||
@p.static_parser
|
||||
@p.use_colors
|
||||
@p.use_colors_file
|
||||
@p.use_experimental_parser
|
||||
@p.version
|
||||
@p.version_check
|
||||
@p.write_json
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
# dbt
|
||||
@click.group(
|
||||
context_settings={"help_option_names": ["-h", "--help"]},
|
||||
@@ -126,38 +165,11 @@ class dbtRunner:
|
||||
epilog="Specify one of these sub-commands and you can find more help from there.",
|
||||
)
|
||||
@click.pass_context
|
||||
@p.cache_selected_only
|
||||
@p.debug
|
||||
@p.deprecated_print
|
||||
@p.enable_legacy_logger
|
||||
@p.fail_fast
|
||||
@p.log_cache_events
|
||||
@p.log_file_max_bytes
|
||||
@p.log_format
|
||||
@p.log_format_file
|
||||
@p.log_level
|
||||
@p.log_level_file
|
||||
@p.log_path
|
||||
@p.macro_debugging
|
||||
@p.partial_parse
|
||||
@p.partial_parse_file_path
|
||||
@p.partial_parse_file_diff
|
||||
@p.populate_cache
|
||||
@p.print
|
||||
@p.printer_width
|
||||
@p.quiet
|
||||
@p.record_timing_info
|
||||
@p.send_anonymous_usage_stats
|
||||
@p.single_threaded
|
||||
@p.static_parser
|
||||
@p.use_colors
|
||||
@p.use_colors_file
|
||||
@p.use_experimental_parser
|
||||
@p.version
|
||||
@p.version_check
|
||||
@global_flags
|
||||
@p.warn_error
|
||||
@p.warn_error_options
|
||||
@p.write_json
|
||||
@p.log_format
|
||||
@p.show_resource_report
|
||||
def cli(ctx, **kwargs):
|
||||
"""An ELT tool for managing your SQL transformations and data models.
|
||||
For more documentation on these commands, visit: docs.getdbt.com
|
||||
@@ -167,13 +179,14 @@ def cli(ctx, **kwargs):
|
||||
# dbt build
|
||||
@cli.command("build")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.defer
|
||||
@p.deprecated_defer
|
||||
@p.exclude
|
||||
@p.fail_fast
|
||||
@p.favor_state
|
||||
@p.deprecated_favor_state
|
||||
@p.full_refresh
|
||||
@p.include_saved_query
|
||||
@p.indirect_selection
|
||||
@p.profile
|
||||
@p.profiles_dir
|
||||
@@ -190,7 +203,6 @@ def cli(ctx, **kwargs):
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@@ -213,6 +225,8 @@ def build(ctx, **kwargs):
|
||||
# dbt clean
|
||||
@cli.command("clean")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.clean_project_files_only
|
||||
@p.profile
|
||||
@p.profiles_dir
|
||||
@p.project_dir
|
||||
@@ -235,6 +249,7 @@ def clean(ctx, **kwargs):
|
||||
# dbt docs
|
||||
@cli.group()
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
def docs(ctx, **kwargs):
|
||||
"""Generate or serve the documentation website for your project"""
|
||||
|
||||
@@ -242,6 +257,7 @@ def docs(ctx, **kwargs):
|
||||
# dbt docs generate
|
||||
@docs.command("generate")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.compile_docs
|
||||
@p.defer
|
||||
@p.deprecated_defer
|
||||
@@ -254,6 +270,7 @@ def docs(ctx, **kwargs):
|
||||
@p.select
|
||||
@p.selector
|
||||
@p.empty_catalog
|
||||
@p.static
|
||||
@p.state
|
||||
@p.defer_state
|
||||
@p.deprecated_state
|
||||
@@ -261,7 +278,6 @@ def docs(ctx, **kwargs):
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@@ -284,6 +300,7 @@ def docs_generate(ctx, **kwargs):
|
||||
# dbt docs serve
|
||||
@docs.command("serve")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.browser
|
||||
@p.port
|
||||
@p.profile
|
||||
@@ -312,6 +329,7 @@ def docs_serve(ctx, **kwargs):
|
||||
# dbt compile
|
||||
@cli.command("compile")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.defer
|
||||
@p.deprecated_defer
|
||||
@p.exclude
|
||||
@@ -330,11 +348,11 @@ def docs_serve(ctx, **kwargs):
|
||||
@p.state
|
||||
@p.defer_state
|
||||
@p.deprecated_state
|
||||
@p.compile_inject_ephemeral_ctes
|
||||
@p.target
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@@ -358,6 +376,7 @@ def compile(ctx, **kwargs):
|
||||
# dbt show
|
||||
@cli.command("show")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.defer
|
||||
@p.deprecated_defer
|
||||
@p.exclude
|
||||
@@ -381,7 +400,6 @@ def compile(ctx, **kwargs):
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@@ -405,6 +423,7 @@ def show(ctx, **kwargs):
|
||||
# dbt debug
|
||||
@cli.command("debug")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.debug_connection
|
||||
@p.config_dir
|
||||
@p.profile
|
||||
@@ -412,7 +431,6 @@ def show(ctx, **kwargs):
|
||||
@p.project_dir
|
||||
@p.target
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
def debug(ctx, **kwargs):
|
||||
@@ -431,18 +449,46 @@ def debug(ctx, **kwargs):
|
||||
# dbt deps
|
||||
@cli.command("deps")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.profile
|
||||
@p.profiles_dir_exists_false
|
||||
@p.project_dir
|
||||
@p.target
|
||||
@p.vars
|
||||
@p.source
|
||||
@p.dry_run
|
||||
@p.lock
|
||||
@p.upgrade
|
||||
@p.add_package
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.unset_profile
|
||||
@requires.project
|
||||
def deps(ctx, **kwargs):
|
||||
"""Pull the most recent version of the dependencies listed in packages.yml"""
|
||||
task = DepsTask(ctx.obj["flags"], ctx.obj["project"])
|
||||
"""Install dbt packages specified.
|
||||
In the following case, a new `package-lock.yml` will be generated and the packages are installed:
|
||||
- user updated the packages.yml
|
||||
- user specify the flag --update, which means for packages that are specified as a
|
||||
range, dbt-core will try to install the newer version
|
||||
Otherwise, deps will use `package-lock.yml` as source of truth to install packages.
|
||||
|
||||
There is a way to add new packages by providing an `--add-package` flag to deps command
|
||||
which will allow user to specify a package they want to add in the format of packagename@version.
|
||||
"""
|
||||
flags = ctx.obj["flags"]
|
||||
if flags.ADD_PACKAGE:
|
||||
if not flags.ADD_PACKAGE["version"] and flags.SOURCE != "local":
|
||||
raise BadOptionUsage(
|
||||
message=f"Version is required in --add-package when a package when source is {flags.SOURCE}",
|
||||
option_name="--add-package",
|
||||
)
|
||||
else:
|
||||
if flags.DRY_RUN:
|
||||
raise BadOptionUsage(
|
||||
message="Invalid flag `--dry-run` when not using `--add-package`.",
|
||||
option_name="--dry-run",
|
||||
)
|
||||
task = DepsTask(flags, ctx.obj["project"])
|
||||
results = task.run()
|
||||
success = task.interpret_results(results)
|
||||
return results, success
|
||||
@@ -451,6 +497,7 @@ def deps(ctx, **kwargs):
|
||||
# dbt init
|
||||
@cli.command("init")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
# for backwards compatibility, accept 'project_name' as an optional positional argument
|
||||
@click.argument("project_name", required=False)
|
||||
@p.profile
|
||||
@@ -473,6 +520,7 @@ def init(ctx, **kwargs):
|
||||
# dbt list
|
||||
@cli.command("list")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.exclude
|
||||
@p.indirect_selection
|
||||
@p.models
|
||||
@@ -518,6 +566,7 @@ cli.add_command(ls, "ls")
|
||||
# dbt parse
|
||||
@cli.command("parse")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.profile
|
||||
@p.profiles_dir
|
||||
@p.project_dir
|
||||
@@ -525,7 +574,6 @@ cli.add_command(ls, "ls")
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@@ -535,19 +583,18 @@ cli.add_command(ls, "ls")
|
||||
def parse(ctx, **kwargs):
|
||||
"""Parses the project and provides information on performance"""
|
||||
# manifest generation and writing happens in @requires.manifest
|
||||
|
||||
return ctx.obj["manifest"], True
|
||||
|
||||
|
||||
# dbt run
|
||||
@cli.command("run")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.defer
|
||||
@p.deprecated_defer
|
||||
@p.favor_state
|
||||
@p.deprecated_favor_state
|
||||
@p.exclude
|
||||
@p.fail_fast
|
||||
@p.full_refresh
|
||||
@p.profile
|
||||
@p.profiles_dir
|
||||
@@ -561,7 +608,6 @@ def parse(ctx, **kwargs):
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@@ -584,6 +630,7 @@ def run(ctx, **kwargs):
|
||||
# dbt retry
|
||||
@cli.command("retry")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.project_dir
|
||||
@p.profiles_dir
|
||||
@p.vars
|
||||
@@ -591,13 +638,12 @@ def run(ctx, **kwargs):
|
||||
@p.target
|
||||
@p.state
|
||||
@p.threads
|
||||
@p.fail_fast
|
||||
@p.full_refresh
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@requires.project
|
||||
@requires.runtime_config
|
||||
@requires.manifest
|
||||
def retry(ctx, **kwargs):
|
||||
"""Retry the nodes that failed in the previous run."""
|
||||
task = RetryTask(
|
||||
@@ -614,6 +660,7 @@ def retry(ctx, **kwargs):
|
||||
# dbt clone
|
||||
@cli.command("clone")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.defer_state
|
||||
@p.exclude
|
||||
@p.full_refresh
|
||||
@@ -628,7 +675,6 @@ def retry(ctx, **kwargs):
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@requires.project
|
||||
@@ -651,6 +697,7 @@ def clone(ctx, **kwargs):
|
||||
# dbt run operation
|
||||
@cli.command("run-operation")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@click.argument("macro")
|
||||
@p.args
|
||||
@p.profile
|
||||
@@ -682,6 +729,7 @@ def run_operation(ctx, **kwargs):
|
||||
# dbt seed
|
||||
@cli.command("seed")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.exclude
|
||||
@p.full_refresh
|
||||
@p.profile
|
||||
@@ -697,7 +745,6 @@ def run_operation(ctx, **kwargs):
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@@ -719,6 +766,7 @@ def seed(ctx, **kwargs):
|
||||
# dbt snapshot
|
||||
@cli.command("snapshot")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.defer
|
||||
@p.deprecated_defer
|
||||
@p.exclude
|
||||
@@ -758,6 +806,7 @@ def snapshot(ctx, **kwargs):
|
||||
# dbt source
|
||||
@cli.group()
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
def source(ctx, **kwargs):
|
||||
"""Manage your project's sources"""
|
||||
|
||||
@@ -765,6 +814,7 @@ def source(ctx, **kwargs):
|
||||
# dbt source freshness
|
||||
@source.command("freshness")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.exclude
|
||||
@p.output_path # TODO: Is this ok to re-use? We have three different output params, how much can we consolidate?
|
||||
@p.profile
|
||||
@@ -807,10 +857,10 @@ cli.commands["source"].add_command(snapshot_freshness, "snapshot-freshness") #
|
||||
# dbt test
|
||||
@cli.command("test")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.defer
|
||||
@p.deprecated_defer
|
||||
@p.exclude
|
||||
@p.fail_fast
|
||||
@p.favor_state
|
||||
@p.deprecated_favor_state
|
||||
@p.indirect_selection
|
||||
@@ -827,7 +877,6 @@ cli.commands["source"].add_command(snapshot_freshness, "snapshot-freshness") #
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
|
||||
@@ -22,6 +22,26 @@ class YAML(ParamType):
|
||||
self.fail(f"String '{value}' is not valid YAML", param, ctx)
|
||||
|
||||
|
||||
class Package(ParamType):
|
||||
"""The Click STRING type. Converts string into dict with package name and version.
|
||||
Example package:
|
||||
package-name@1.0.0
|
||||
package-name
|
||||
"""
|
||||
|
||||
name = "NewPackage"
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
# assume non-string values are a problem
|
||||
if not isinstance(value, str):
|
||||
self.fail(f"Cannot load Package from type {type(value)}", param, ctx)
|
||||
try:
|
||||
package_name, package_version = value.split("@")
|
||||
return {"name": package_name, "version": package_version}
|
||||
except ValueError:
|
||||
return {"name": value, "version": None}
|
||||
|
||||
|
||||
class WarnErrorOptionsType(YAML):
|
||||
"""The Click WarnErrorOptions type. Converts YAML strings into objects."""
|
||||
|
||||
|
||||
@@ -2,19 +2,21 @@ import click
|
||||
import inspect
|
||||
import typing as t
|
||||
from click import Context
|
||||
from click.parser import OptionParser, ParsingState
|
||||
from dbt.cli.option_types import ChoiceTuple
|
||||
|
||||
|
||||
# Implementation from: https://stackoverflow.com/a/48394004
|
||||
# Note MultiOption options must be specified with type=tuple or type=ChoiceTuple (https://github.com/pallets/click/issues/2012)
|
||||
class MultiOption(click.Option):
|
||||
def __init__(self, *args, **kwargs):
|
||||
def __init__(self, *args, **kwargs) -> None:
|
||||
self.save_other_options = kwargs.pop("save_other_options", True)
|
||||
nargs = kwargs.pop("nargs", -1)
|
||||
assert nargs == -1, "nargs, if set, must be -1 not {}".format(nargs)
|
||||
super(MultiOption, self).__init__(*args, **kwargs)
|
||||
self._previous_parser_process = None
|
||||
self._eat_all_parser = None
|
||||
# this makes mypy happy, setting these to None causes mypy failures
|
||||
self._previous_parser_process = lambda *args, **kwargs: None
|
||||
self._eat_all_parser = lambda *args, **kwargs: None
|
||||
|
||||
# validate that multiple=True
|
||||
multiple = kwargs.pop("multiple", None)
|
||||
@@ -29,34 +31,35 @@ class MultiOption(click.Option):
|
||||
else:
|
||||
assert isinstance(option_type, ChoiceTuple), msg
|
||||
|
||||
def add_to_parser(self, parser, ctx):
|
||||
def parser_process(value, state):
|
||||
def add_to_parser(self, parser: OptionParser, ctx: Context):
|
||||
def parser_process(value: str, state: ParsingState):
|
||||
# method to hook to the parser.process
|
||||
done = False
|
||||
value = [value]
|
||||
value_list = str.split(value, " ")
|
||||
if self.save_other_options:
|
||||
# grab everything up to the next option
|
||||
while state.rargs and not done:
|
||||
for prefix in self._eat_all_parser.prefixes:
|
||||
for prefix in self._eat_all_parser.prefixes: # type: ignore[attr-defined]
|
||||
if state.rargs[0].startswith(prefix):
|
||||
done = True
|
||||
if not done:
|
||||
value.append(state.rargs.pop(0))
|
||||
value_list.append(state.rargs.pop(0))
|
||||
else:
|
||||
# grab everything remaining
|
||||
value += state.rargs
|
||||
value_list += state.rargs
|
||||
state.rargs[:] = []
|
||||
value = tuple(value)
|
||||
value_tuple = tuple(value_list)
|
||||
# call the actual process
|
||||
self._previous_parser_process(value, state)
|
||||
self._previous_parser_process(value_tuple, state)
|
||||
|
||||
retval = super(MultiOption, self).add_to_parser(parser, ctx)
|
||||
for name in self.opts:
|
||||
our_parser = parser._long_opt.get(name) or parser._short_opt.get(name)
|
||||
if our_parser:
|
||||
self._eat_all_parser = our_parser
|
||||
self._eat_all_parser = our_parser # type: ignore[assignment]
|
||||
self._previous_parser_process = our_parser.process
|
||||
our_parser.process = parser_process
|
||||
# mypy doesnt like assingment to a method see https://github.com/python/mypy/issues/708
|
||||
our_parser.process = parser_process # type: ignore[method-assign]
|
||||
break
|
||||
return retval
|
||||
|
||||
|
||||
@@ -2,10 +2,16 @@ from pathlib import Path
|
||||
|
||||
import click
|
||||
from dbt.cli.options import MultiOption
|
||||
from dbt.cli.option_types import YAML, ChoiceTuple, WarnErrorOptionsType
|
||||
from dbt.cli.option_types import YAML, ChoiceTuple, WarnErrorOptionsType, Package
|
||||
from dbt.cli.resolvers import default_project_dir, default_profiles_dir
|
||||
from dbt.version import get_version_information
|
||||
|
||||
add_package = click.option(
|
||||
"--add-package",
|
||||
help="Add a package to current package spec, specify it as package-name@version. Change the source with --source flag.",
|
||||
envvar=None,
|
||||
type=Package(),
|
||||
)
|
||||
args = click.option(
|
||||
"--args",
|
||||
envvar=None,
|
||||
@@ -40,6 +46,14 @@ compile_docs = click.option(
|
||||
default=True,
|
||||
)
|
||||
|
||||
compile_inject_ephemeral_ctes = click.option(
|
||||
"--inject-ephemeral-ctes/--no-inject-ephemeral-ctes",
|
||||
envvar=None,
|
||||
help="Internal flag controlling injection of referenced ephemeral models' CTEs during `compile`.",
|
||||
hidden=True,
|
||||
default=True,
|
||||
)
|
||||
|
||||
config_dir = click.option(
|
||||
"--config-dir",
|
||||
envvar=None,
|
||||
@@ -69,6 +83,14 @@ deprecated_defer = click.option(
|
||||
hidden=True,
|
||||
)
|
||||
|
||||
dry_run = click.option(
|
||||
"--dry-run",
|
||||
envvar=None,
|
||||
help="Option to run `dbt deps --add-package` without updating package-lock.yml file.",
|
||||
is_flag=True,
|
||||
)
|
||||
|
||||
|
||||
enable_legacy_logger = click.option(
|
||||
"--enable-legacy-logger/--no-enable-legacy-logger",
|
||||
envvar="DBT_ENABLE_LEGACY_LOGGER",
|
||||
@@ -119,6 +141,13 @@ indirect_selection = click.option(
|
||||
default="eager",
|
||||
)
|
||||
|
||||
lock = click.option(
|
||||
"--lock",
|
||||
envvar=None,
|
||||
help="Generate the package-lock.yml file without install the packages.",
|
||||
is_flag=True,
|
||||
)
|
||||
|
||||
log_cache_events = click.option(
|
||||
"--log-cache-events/--no-log-cache-events",
|
||||
help="Enable verbose logging for relational cache events to help when debugging.",
|
||||
@@ -307,7 +336,7 @@ printer_width = click.option(
|
||||
profile = click.option(
|
||||
"--profile",
|
||||
envvar=None,
|
||||
help="Which profile to load. Overrides setting in dbt_project.yml.",
|
||||
help="Which existing profile to load. Overrides setting in dbt_project.yml.",
|
||||
)
|
||||
|
||||
profiles_dir = click.option(
|
||||
@@ -360,6 +389,7 @@ resource_type = click.option(
|
||||
type=ChoiceTuple(
|
||||
[
|
||||
"metric",
|
||||
"semantic_model",
|
||||
"source",
|
||||
"analysis",
|
||||
"model",
|
||||
@@ -377,6 +407,14 @@ resource_type = click.option(
|
||||
default=(),
|
||||
)
|
||||
|
||||
include_saved_query = click.option(
|
||||
"--include-saved-query/--no-include-saved-query",
|
||||
envvar="DBT_INCLUDE_SAVED_QUERY",
|
||||
help="Include saved queries in the list of resources to be selected for build command",
|
||||
is_flag=True,
|
||||
hidden=True,
|
||||
)
|
||||
|
||||
model_decls = ("-m", "--models", "--model")
|
||||
select_decls = ("-s", "--select")
|
||||
select_attrs = {
|
||||
@@ -414,6 +452,13 @@ send_anonymous_usage_stats = click.option(
|
||||
default=True,
|
||||
)
|
||||
|
||||
clean_project_files_only = click.option(
|
||||
"--clean-project-files-only / --no-clean-project-files-only",
|
||||
envvar="DBT_CLEAN_PROJECT_FILES_ONLY",
|
||||
help="If disabled, dbt clean will delete all paths specified in clean-paths, even if they're outside the dbt project.",
|
||||
default=True,
|
||||
)
|
||||
|
||||
show = click.option(
|
||||
"--show",
|
||||
envvar=None,
|
||||
@@ -449,6 +494,21 @@ empty_catalog = click.option(
|
||||
is_flag=True,
|
||||
)
|
||||
|
||||
source = click.option(
|
||||
"--source",
|
||||
envvar=None,
|
||||
help="Source to download page from, must be one of hub, git, or local. Defaults to hub.",
|
||||
type=click.Choice(["hub", "git", "local"], case_sensitive=True),
|
||||
default="hub",
|
||||
)
|
||||
|
||||
static = click.option(
|
||||
"--static",
|
||||
help="Generate an additional static_index.html with manifest and catalog built-in.",
|
||||
default=False,
|
||||
is_flag=True,
|
||||
)
|
||||
|
||||
state = click.option(
|
||||
"--state",
|
||||
envvar="DBT_STATE",
|
||||
@@ -517,6 +577,13 @@ target_path = click.option(
|
||||
type=click.Path(),
|
||||
)
|
||||
|
||||
upgrade = click.option(
|
||||
"--upgrade",
|
||||
envvar=None,
|
||||
help="Upgrade packages to the latest version.",
|
||||
is_flag=True,
|
||||
)
|
||||
|
||||
debug_connection = click.option(
|
||||
"--connection",
|
||||
envvar=None,
|
||||
@@ -598,3 +665,10 @@ write_json = click.option(
|
||||
help="Whether or not to write the manifest.json and run_results.json files to the target directory",
|
||||
default=True,
|
||||
)
|
||||
|
||||
show_resource_report = click.option(
|
||||
"--show-resource-report/--no-show-resource-report",
|
||||
default=False,
|
||||
envvar="DBT_SHOW_RESOURCE_REPORT",
|
||||
hidden=True,
|
||||
)
|
||||
|
||||
@@ -9,24 +9,28 @@ from dbt.cli.exceptions import (
|
||||
from dbt.cli.flags import Flags
|
||||
from dbt.config import RuntimeConfig
|
||||
from dbt.config.runtime import load_project, load_profile, UnsetProfile
|
||||
from dbt.events.base_types import EventLevel
|
||||
from dbt.events.functions import fire_event, LOG_VERSION, set_invocation_id, setup_event_logger
|
||||
from dbt.events.types import (
|
||||
CommandCompleted,
|
||||
MainReportVersion,
|
||||
MainReportArgs,
|
||||
MainTrackingUserState,
|
||||
ResourceReport,
|
||||
)
|
||||
from dbt.events.helpers import get_json_string_utcnow
|
||||
from dbt.events.types import MainEncounteredError, MainStackTrace
|
||||
from dbt.exceptions import Exception as DbtException, DbtProjectError, FailFastError
|
||||
from dbt.parser.manifest import ManifestLoader, write_manifest
|
||||
from dbt.parser.manifest import parse_manifest
|
||||
from dbt.profiler import profiler
|
||||
from dbt.tracking import active_user, initialize_from_flags, track_run
|
||||
from dbt.utils import cast_dict_to_dict_of_strings
|
||||
from dbt.plugins import set_up_plugin_manager, get_plugin_manager
|
||||
from dbt.plugins import set_up_plugin_manager
|
||||
|
||||
|
||||
from click import Context
|
||||
from functools import update_wrapper
|
||||
import importlib.util
|
||||
import time
|
||||
import traceback
|
||||
|
||||
@@ -96,6 +100,28 @@ def postflight(func):
|
||||
fire_event(MainStackTrace(stack_trace=traceback.format_exc()))
|
||||
raise ExceptionExit(e)
|
||||
finally:
|
||||
# Fire ResourceReport, but only on systems which support the resource
|
||||
# module. (Skip it on Windows).
|
||||
if importlib.util.find_spec("resource") is not None:
|
||||
import resource
|
||||
|
||||
rusage = resource.getrusage(resource.RUSAGE_SELF)
|
||||
fire_event(
|
||||
ResourceReport(
|
||||
command_name=ctx.command.name,
|
||||
command_success=success,
|
||||
command_wall_clock_time=time.perf_counter() - start_func,
|
||||
process_user_time=rusage.ru_utime,
|
||||
process_kernel_time=rusage.ru_stime,
|
||||
process_mem_max_rss=rusage.ru_maxrss,
|
||||
process_in_blocks=rusage.ru_inblock,
|
||||
process_out_blocks=rusage.ru_oublock,
|
||||
),
|
||||
EventLevel.INFO
|
||||
if "flags" in ctx.obj and ctx.obj["flags"].SHOW_RESOURCE_REPORT
|
||||
else None,
|
||||
)
|
||||
|
||||
fire_event(
|
||||
CommandCompleted(
|
||||
command=ctx.command_path,
|
||||
@@ -239,23 +265,14 @@ def manifest(*args0, write=True, write_perf_info=False):
|
||||
raise DbtProjectError("profile, project, and runtime_config required for manifest")
|
||||
|
||||
runtime_config = ctx.obj["runtime_config"]
|
||||
register_adapter(runtime_config)
|
||||
|
||||
# a manifest has already been set on the context, so don't overwrite it
|
||||
if ctx.obj.get("manifest") is None:
|
||||
manifest = ManifestLoader.get_full_manifest(
|
||||
runtime_config,
|
||||
write_perf_info=write_perf_info,
|
||||
ctx.obj["manifest"] = parse_manifest(
|
||||
runtime_config, write_perf_info, write, ctx.obj["flags"].write_json
|
||||
)
|
||||
|
||||
ctx.obj["manifest"] = manifest
|
||||
if write and ctx.obj["flags"].write_json:
|
||||
write_manifest(manifest, runtime_config.project_target_path)
|
||||
pm = get_plugin_manager(runtime_config.project_name)
|
||||
plugin_artifacts = pm.get_manifest_artifacts(manifest)
|
||||
for path, plugin_artifact in plugin_artifacts.items():
|
||||
plugin_artifact.write(path)
|
||||
|
||||
else:
|
||||
register_adapter(runtime_config)
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return update_wrapper(wrapper, func)
|
||||
|
||||
@@ -12,6 +12,20 @@ from dbt.exceptions import DbtRuntimeError
|
||||
BOM = BOM_UTF8.decode("utf-8") # '\ufeff'
|
||||
|
||||
|
||||
class Integer(agate.data_types.DataType):
|
||||
def cast(self, d):
|
||||
# by default agate will cast none as a Number
|
||||
# but we need to cast it as an Integer to preserve
|
||||
# the type when merging and unioning tables
|
||||
if type(d) == int or d is None:
|
||||
return d
|
||||
else:
|
||||
raise agate.exceptions.CastError('Can not parse value "%s" as Integer.' % d)
|
||||
|
||||
def jsonify(self, d):
|
||||
return d
|
||||
|
||||
|
||||
class Number(agate.data_types.Number):
|
||||
# undo the change in https://github.com/wireservice/agate/pull/733
|
||||
# i.e. do not cast True and False to numeric 1 and 0
|
||||
@@ -47,6 +61,7 @@ def build_type_tester(
|
||||
) -> agate.TypeTester:
|
||||
|
||||
types = [
|
||||
Integer(null_values=("null", "")),
|
||||
Number(null_values=("null", "")),
|
||||
agate.data_types.Date(null_values=("null", ""), date_format="%Y-%m-%d"),
|
||||
agate.data_types.DateTime(null_values=("null", ""), datetime_format="%Y-%m-%d %H:%M:%S"),
|
||||
@@ -150,7 +165,7 @@ NullableAgateType = Union[agate.data_types.DataType, _NullMarker]
|
||||
|
||||
|
||||
class ColumnTypeBuilder(Dict[str, NullableAgateType]):
|
||||
def __init__(self):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
@@ -165,6 +180,13 @@ class ColumnTypeBuilder(Dict[str, NullableAgateType]):
|
||||
elif isinstance(value, _NullMarker):
|
||||
# use the existing value
|
||||
return
|
||||
# when one table column is Number while another is Integer, force the column to Number on merge
|
||||
elif isinstance(value, Integer) and isinstance(existing_type, agate.data_types.Number):
|
||||
# use the existing value
|
||||
return
|
||||
elif isinstance(existing_type, Integer) and isinstance(value, agate.data_types.Number):
|
||||
# overwrite
|
||||
super().__setitem__(key, value)
|
||||
elif not isinstance(value, type(existing_type)):
|
||||
# actual type mismatch!
|
||||
raise DbtRuntimeError(
|
||||
@@ -176,8 +198,9 @@ class ColumnTypeBuilder(Dict[str, NullableAgateType]):
|
||||
result: Dict[str, agate.data_types.DataType] = {}
|
||||
for key, value in self.items():
|
||||
if isinstance(value, _NullMarker):
|
||||
# this is what agate would do.
|
||||
result[key] = agate.data_types.Number()
|
||||
# agate would make it a Number but we'll make it Integer so that if this column
|
||||
# gets merged with another Integer column, it won't get forced to a Number
|
||||
result[key] = Integer()
|
||||
else:
|
||||
result[key] = value
|
||||
return result
|
||||
@@ -217,3 +240,12 @@ def merge_tables(tables: List[agate.Table]) -> agate.Table:
|
||||
rows.append(agate.Row(data, column_names))
|
||||
# _is_fork to tell agate that we already made things into `Row`s.
|
||||
return agate.Table(rows, column_names, column_types, _is_fork=True)
|
||||
|
||||
|
||||
def get_column_value_uncased(column_name: str, row: agate.Row) -> Any:
|
||||
"""Get the value of a column in this row, ignoring the casing of the column name."""
|
||||
for key, value in row.items():
|
||||
if key.casefold() == column_name.casefold():
|
||||
return value
|
||||
|
||||
raise KeyError
|
||||
|
||||
@@ -111,7 +111,7 @@ def checkout(cwd, repo, revision=None):
|
||||
def get_current_sha(cwd):
|
||||
out, err = run_cmd(cwd, ["git", "rev-parse", "HEAD"], env={"LC_ALL": "C"})
|
||||
|
||||
return out.decode("utf-8")
|
||||
return out.decode("utf-8").strip()
|
||||
|
||||
|
||||
def remove_remote(cwd):
|
||||
|
||||
@@ -125,7 +125,7 @@ def _get_tests_for_node(manifest: Manifest, unique_id: UniqueID) -> List[UniqueI
|
||||
|
||||
|
||||
class Linker:
|
||||
def __init__(self, data=None):
|
||||
def __init__(self, data=None) -> None:
|
||||
if data is None:
|
||||
data = {}
|
||||
self.graph = nx.DiGraph(**data)
|
||||
@@ -183,14 +183,16 @@ class Linker:
|
||||
def link_graph(self, manifest: Manifest):
|
||||
for source in manifest.sources.values():
|
||||
self.add_node(source.unique_id)
|
||||
for semantic_model in manifest.semantic_models.values():
|
||||
self.add_node(semantic_model.unique_id)
|
||||
for node in manifest.nodes.values():
|
||||
self.link_node(node, manifest)
|
||||
for semantic_model in manifest.semantic_models.values():
|
||||
self.link_node(semantic_model, manifest)
|
||||
for exposure in manifest.exposures.values():
|
||||
self.link_node(exposure, manifest)
|
||||
for metric in manifest.metrics.values():
|
||||
self.link_node(metric, manifest)
|
||||
for saved_query in manifest.saved_queries.values():
|
||||
self.link_node(saved_query, manifest)
|
||||
|
||||
cycle = self.find_cycles()
|
||||
|
||||
@@ -274,7 +276,7 @@ class Linker:
|
||||
|
||||
|
||||
class Compiler:
|
||||
def __init__(self, config):
|
||||
def __init__(self, config) -> None:
|
||||
self.config = config
|
||||
|
||||
def initialize(self):
|
||||
@@ -320,6 +322,10 @@ class Compiler:
|
||||
if model.compiled_code is None:
|
||||
raise DbtRuntimeError("Cannot inject ctes into an uncompiled node", model)
|
||||
|
||||
# tech debt: safe flag/arg access (#6259)
|
||||
if not getattr(self.config.args, "inject_ephemeral_ctes", True):
|
||||
return (model, [])
|
||||
|
||||
# extra_ctes_injected flag says that we've already recursively injected the ctes
|
||||
if model.extra_ctes_injected:
|
||||
return (model, model.extra_ctes)
|
||||
|
||||
@@ -83,7 +83,7 @@ class Profile(HasCredentials):
|
||||
user_config: UserConfig,
|
||||
threads: int,
|
||||
credentials: Credentials,
|
||||
):
|
||||
) -> None:
|
||||
"""Explicitly defining `__init__` to work around bug in Python 3.9.7
|
||||
https://bugs.python.org/issue45081
|
||||
"""
|
||||
|
||||
@@ -16,8 +16,12 @@ import os
|
||||
|
||||
from dbt.flags import get_flags
|
||||
from dbt import deprecations
|
||||
from dbt.constants import DEPENDENCIES_FILE_NAME, PACKAGES_FILE_NAME
|
||||
from dbt.clients.system import path_exists, resolve_path_from_base, load_file_contents
|
||||
from dbt.constants import (
|
||||
DEPENDENCIES_FILE_NAME,
|
||||
PACKAGES_FILE_NAME,
|
||||
PACKAGE_LOCK_HASH_KEY,
|
||||
)
|
||||
from dbt.clients.system import path_exists, load_file_contents
|
||||
from dbt.clients.yaml_helper import load_yaml_text
|
||||
from dbt.contracts.connection import QueryComment
|
||||
from dbt.exceptions import (
|
||||
@@ -94,16 +98,17 @@ def _load_yaml(path):
|
||||
return load_yaml_text(contents)
|
||||
|
||||
|
||||
def package_and_project_data_from_root(project_root):
|
||||
package_filepath = resolve_path_from_base(PACKAGES_FILE_NAME, project_root)
|
||||
dependencies_filepath = resolve_path_from_base(DEPENDENCIES_FILE_NAME, project_root)
|
||||
def load_yml_dict(file_path):
|
||||
ret = {}
|
||||
if path_exists(file_path):
|
||||
ret = _load_yaml(file_path) or {}
|
||||
return ret
|
||||
|
||||
packages_yml_dict = {}
|
||||
dependencies_yml_dict = {}
|
||||
if path_exists(package_filepath):
|
||||
packages_yml_dict = _load_yaml(package_filepath) or {}
|
||||
if path_exists(dependencies_filepath):
|
||||
dependencies_yml_dict = _load_yaml(dependencies_filepath) or {}
|
||||
|
||||
def package_and_project_data_from_root(project_root):
|
||||
|
||||
packages_yml_dict = load_yml_dict(f"{project_root}/{PACKAGES_FILE_NAME}")
|
||||
dependencies_yml_dict = load_yml_dict(f"{project_root}/{DEPENDENCIES_FILE_NAME}")
|
||||
|
||||
if "packages" in packages_yml_dict and "packages" in dependencies_yml_dict:
|
||||
msg = "The 'packages' key cannot be specified in both packages.yml and dependencies.yml"
|
||||
@@ -123,10 +128,21 @@ def package_and_project_data_from_root(project_root):
|
||||
return packages_dict, packages_specified_path
|
||||
|
||||
|
||||
def package_config_from_data(packages_data: Dict[str, Any]) -> PackageConfig:
|
||||
def package_config_from_data(
|
||||
packages_data: Dict[str, Any],
|
||||
unrendered_packages_data: Optional[Dict[str, Any]] = None,
|
||||
) -> PackageConfig:
|
||||
if not packages_data:
|
||||
packages_data = {"packages": []}
|
||||
|
||||
# this depends on the two lists being in the same order
|
||||
if unrendered_packages_data:
|
||||
unrendered_packages_data = deepcopy(unrendered_packages_data)
|
||||
for i in range(0, len(packages_data.get("packages", []))):
|
||||
packages_data["packages"][i]["unrendered"] = unrendered_packages_data["packages"][i]
|
||||
|
||||
if PACKAGE_LOCK_HASH_KEY in packages_data:
|
||||
packages_data.pop(PACKAGE_LOCK_HASH_KEY)
|
||||
try:
|
||||
PackageConfig.validate(packages_data)
|
||||
packages = PackageConfig.from_dict(packages_data)
|
||||
@@ -319,7 +335,7 @@ class PartialProject(RenderComponents):
|
||||
|
||||
def render_package_metadata(self, renderer: PackageRenderer) -> ProjectPackageMetadata:
|
||||
packages_data = renderer.render_data(self.packages_dict)
|
||||
packages_config = package_config_from_data(packages_data)
|
||||
packages_config = package_config_from_data(packages_data, self.packages_dict)
|
||||
if not self.project_name:
|
||||
raise DbtProjectError("Package dbt_project.yml must have a name!")
|
||||
return ProjectPackageMetadata(self.project_name, packages_config.packages)
|
||||
@@ -426,8 +442,11 @@ class PartialProject(RenderComponents):
|
||||
sources: Dict[str, Any]
|
||||
tests: Dict[str, Any]
|
||||
metrics: Dict[str, Any]
|
||||
semantic_models: Dict[str, Any]
|
||||
saved_queries: Dict[str, Any]
|
||||
exposures: Dict[str, Any]
|
||||
vars_value: VarProvider
|
||||
dbt_cloud: Dict[str, Any]
|
||||
|
||||
dispatch = cfg.dispatch
|
||||
models = cfg.models
|
||||
@@ -436,6 +455,8 @@ class PartialProject(RenderComponents):
|
||||
sources = cfg.sources
|
||||
tests = cfg.tests
|
||||
metrics = cfg.metrics
|
||||
semantic_models = cfg.semantic_models
|
||||
saved_queries = cfg.saved_queries
|
||||
exposures = cfg.exposures
|
||||
if cfg.vars is None:
|
||||
vars_dict: Dict[str, Any] = {}
|
||||
@@ -449,8 +470,9 @@ class PartialProject(RenderComponents):
|
||||
on_run_end: List[str] = value_or(cfg.on_run_end, [])
|
||||
|
||||
query_comment = _query_comment_from_cfg(cfg.query_comment)
|
||||
|
||||
packages: PackageConfig = package_config_from_data(rendered.packages_dict)
|
||||
packages: PackageConfig = package_config_from_data(
|
||||
rendered.packages_dict, unrendered.packages_dict
|
||||
)
|
||||
selectors = selector_config_from_data(rendered.selectors_dict)
|
||||
manifest_selectors: Dict[str, Any] = {}
|
||||
if rendered.selectors_dict and rendered.selectors_dict["selectors"]:
|
||||
@@ -459,6 +481,8 @@ class PartialProject(RenderComponents):
|
||||
manifest_selectors = SelectorDict.parse_from_selectors_list(
|
||||
rendered.selectors_dict["selectors"]
|
||||
)
|
||||
dbt_cloud = cfg.dbt_cloud
|
||||
|
||||
project = Project(
|
||||
project_name=name,
|
||||
version=version,
|
||||
@@ -492,12 +516,15 @@ class PartialProject(RenderComponents):
|
||||
sources=sources,
|
||||
tests=tests,
|
||||
metrics=metrics,
|
||||
semantic_models=semantic_models,
|
||||
saved_queries=saved_queries,
|
||||
exposures=exposures,
|
||||
vars=vars_value,
|
||||
config_version=cfg.config_version,
|
||||
unrendered=unrendered,
|
||||
project_env_vars=project_env_vars,
|
||||
restrict_access=cfg.restrict_access,
|
||||
dbt_cloud=dbt_cloud,
|
||||
)
|
||||
# sanity check - this means an internal issue
|
||||
project.validate()
|
||||
@@ -541,6 +568,7 @@ class PartialProject(RenderComponents):
|
||||
packages_specified_path,
|
||||
) = package_and_project_data_from_root(project_root)
|
||||
selectors_dict = selector_data_from_root(project_root)
|
||||
|
||||
return cls.from_dicts(
|
||||
project_root=project_root,
|
||||
project_dict=project_dict,
|
||||
@@ -598,6 +626,8 @@ class Project:
|
||||
sources: Dict[str, Any]
|
||||
tests: Dict[str, Any]
|
||||
metrics: Dict[str, Any]
|
||||
semantic_models: Dict[str, Any]
|
||||
saved_queries: Dict[str, Any]
|
||||
exposures: Dict[str, Any]
|
||||
vars: VarProvider
|
||||
dbt_version: List[VersionSpecifier]
|
||||
@@ -609,6 +639,7 @@ class Project:
|
||||
unrendered: RenderComponents
|
||||
project_env_vars: Dict[str, Any]
|
||||
restrict_access: bool
|
||||
dbt_cloud: Dict[str, Any]
|
||||
|
||||
@property
|
||||
def all_source_paths(self) -> List[str]:
|
||||
@@ -673,11 +704,14 @@ class Project:
|
||||
"sources": self.sources,
|
||||
"tests": self.tests,
|
||||
"metrics": self.metrics,
|
||||
"semantic-models": self.semantic_models,
|
||||
"saved-queries": self.saved_queries,
|
||||
"exposures": self.exposures,
|
||||
"vars": self.vars.to_dict(),
|
||||
"require-dbt-version": [v.to_version_string() for v in self.dbt_version],
|
||||
"config-version": self.config_version,
|
||||
"restrict-access": self.restrict_access,
|
||||
"dbt-cloud": self.dbt_cloud,
|
||||
}
|
||||
)
|
||||
if self.query_comment:
|
||||
|
||||
@@ -74,7 +74,7 @@ def _list_if_none_or_string(value):
|
||||
|
||||
|
||||
class ProjectPostprocessor(Dict[Keypath, Callable[[Any], Any]]):
|
||||
def __init__(self):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
|
||||
self[("on-run-start",)] = _list_if_none_or_string
|
||||
|
||||
@@ -167,6 +167,8 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
||||
sources=project.sources,
|
||||
tests=project.tests,
|
||||
metrics=project.metrics,
|
||||
semantic_models=project.semantic_models,
|
||||
saved_queries=project.saved_queries,
|
||||
exposures=project.exposures,
|
||||
vars=project.vars,
|
||||
config_version=project.config_version,
|
||||
@@ -182,6 +184,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
||||
args=args,
|
||||
cli_vars=cli_vars,
|
||||
dependencies=dependencies,
|
||||
dbt_cloud=project.dbt_cloud,
|
||||
)
|
||||
|
||||
# Called by 'load_projects' in this class
|
||||
@@ -322,6 +325,8 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
||||
"sources": self._get_config_paths(self.sources),
|
||||
"tests": self._get_config_paths(self.tests),
|
||||
"metrics": self._get_config_paths(self.metrics),
|
||||
"semantic_models": self._get_config_paths(self.semantic_models),
|
||||
"saved_queries": self._get_config_paths(self.saved_queries),
|
||||
"exposures": self._get_config_paths(self.exposures),
|
||||
}
|
||||
|
||||
@@ -404,7 +409,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
||||
|
||||
|
||||
class UnsetCredentials(Credentials):
|
||||
def __init__(self):
|
||||
def __init__(self) -> None:
|
||||
super().__init__("", "")
|
||||
|
||||
@property
|
||||
|
||||
@@ -9,8 +9,11 @@ PIN_PACKAGE_URL = (
|
||||
"https://docs.getdbt.com/docs/package-management#section-specifying-package-versions"
|
||||
)
|
||||
|
||||
DBT_PROJECT_FILE_NAME = "dbt_project.yml"
|
||||
PACKAGES_FILE_NAME = "packages.yml"
|
||||
DEPENDENCIES_FILE_NAME = "dependencies.yml"
|
||||
PACKAGE_LOCK_FILE_NAME = "package-lock.yml"
|
||||
MANIFEST_FILE_NAME = "manifest.json"
|
||||
SEMANTIC_MANIFEST_FILE_NAME = "semantic_manifest.json"
|
||||
PARTIAL_PARSE_FILE_NAME = "partial_parse.msgpack"
|
||||
PACKAGE_LOCK_HASH_KEY = "sha1_hash"
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
from typing import Any, Dict, NoReturn, Optional, Mapping, Iterable, Set, List
|
||||
from typing import Any, Callable, Dict, NoReturn, Optional, Mapping, Iterable, Set, List
|
||||
import threading
|
||||
|
||||
from dbt.flags import get_flags
|
||||
@@ -86,33 +88,29 @@ def get_context_modules() -> Dict[str, Dict[str, Any]]:
|
||||
|
||||
|
||||
class ContextMember:
|
||||
def __init__(self, value, name=None):
|
||||
def __init__(self, value: Any, name: Optional[str] = None) -> None:
|
||||
self.name = name
|
||||
self.inner = value
|
||||
|
||||
def key(self, default):
|
||||
def key(self, default: str) -> str:
|
||||
if self.name is None:
|
||||
return default
|
||||
return self.name
|
||||
|
||||
|
||||
def contextmember(value):
|
||||
if isinstance(value, str):
|
||||
return lambda v: ContextMember(v, name=value)
|
||||
return ContextMember(value)
|
||||
def contextmember(value: Optional[str] = None) -> Callable:
|
||||
return lambda v: ContextMember(v, name=value)
|
||||
|
||||
|
||||
def contextproperty(value):
|
||||
if isinstance(value, str):
|
||||
return lambda v: ContextMember(property(v), name=value)
|
||||
return ContextMember(property(value))
|
||||
def contextproperty(value: Optional[str] = None) -> Callable:
|
||||
return lambda v: ContextMember(property(v), name=value)
|
||||
|
||||
|
||||
class ContextMeta(type):
|
||||
def __new__(mcls, name, bases, dct):
|
||||
context_members = {}
|
||||
context_attrs = {}
|
||||
new_dct = {}
|
||||
def __new__(mcls, name, bases, dct: Dict[str, Any]) -> ContextMeta:
|
||||
context_members: Dict[str, Any] = {}
|
||||
context_attrs: Dict[str, Any] = {}
|
||||
new_dct: Dict[str, Any] = {}
|
||||
|
||||
for base in bases:
|
||||
context_members.update(getattr(base, "_context_members_", {}))
|
||||
@@ -148,27 +146,28 @@ class Var:
|
||||
return self._cli_vars
|
||||
|
||||
@property
|
||||
def node_name(self):
|
||||
def node_name(self) -> str:
|
||||
if self._node is not None:
|
||||
return self._node.name
|
||||
else:
|
||||
return "<Configuration>"
|
||||
|
||||
def get_missing_var(self, var_name):
|
||||
raise RequiredVarNotFoundError(var_name, self._merged, self._node)
|
||||
def get_missing_var(self, var_name: str) -> NoReturn:
|
||||
# TODO function name implies a non exception resolution
|
||||
raise RequiredVarNotFoundError(var_name, dict(self._merged), self._node)
|
||||
|
||||
def has_var(self, var_name: str):
|
||||
def has_var(self, var_name: str) -> bool:
|
||||
return var_name in self._merged
|
||||
|
||||
def get_rendered_var(self, var_name):
|
||||
def get_rendered_var(self, var_name: str) -> Any:
|
||||
raw = self._merged[var_name]
|
||||
# if bool/int/float/etc are passed in, don't compile anything
|
||||
if not isinstance(raw, str):
|
||||
return raw
|
||||
|
||||
return get_rendered(raw, self._context)
|
||||
return get_rendered(raw, dict(self._context))
|
||||
|
||||
def __call__(self, var_name, default=_VAR_NOTSET):
|
||||
def __call__(self, var_name: str, default: Any = _VAR_NOTSET) -> Any:
|
||||
if self.has_var(var_name):
|
||||
return self.get_rendered_var(var_name)
|
||||
elif default is not self._VAR_NOTSET:
|
||||
@@ -178,13 +177,17 @@ class Var:
|
||||
|
||||
|
||||
class BaseContext(metaclass=ContextMeta):
|
||||
# subclass is TargetContext
|
||||
def __init__(self, cli_vars):
|
||||
self._ctx = {}
|
||||
self.cli_vars = cli_vars
|
||||
self.env_vars = {}
|
||||
# Set by ContextMeta
|
||||
_context_members_: Dict[str, Any]
|
||||
_context_attrs_: Dict[str, Any]
|
||||
|
||||
def generate_builtins(self):
|
||||
# subclass is TargetContext
|
||||
def __init__(self, cli_vars: Dict[str, Any]) -> None:
|
||||
self._ctx: Dict[str, Any] = {}
|
||||
self.cli_vars: Dict[str, Any] = cli_vars
|
||||
self.env_vars: Dict[str, Any] = {}
|
||||
|
||||
def generate_builtins(self) -> Dict[str, Any]:
|
||||
builtins: Dict[str, Any] = {}
|
||||
for key, value in self._context_members_.items():
|
||||
if hasattr(value, "__get__"):
|
||||
@@ -194,14 +197,14 @@ class BaseContext(metaclass=ContextMeta):
|
||||
return builtins
|
||||
|
||||
# no dbtClassMixin so this is not an actual override
|
||||
def to_dict(self):
|
||||
def to_dict(self) -> Dict[str, Any]:
|
||||
self._ctx["context"] = self._ctx
|
||||
builtins = self.generate_builtins()
|
||||
self._ctx["builtins"] = builtins
|
||||
self._ctx.update(builtins)
|
||||
return self._ctx
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def dbt_version(self) -> str:
|
||||
"""The `dbt_version` variable returns the installed version of dbt that
|
||||
is currently running. It can be used for debugging or auditing
|
||||
@@ -221,7 +224,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
"""
|
||||
return dbt_version
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def var(self) -> Var:
|
||||
"""Variables can be passed from your `dbt_project.yml` file into models
|
||||
during compilation. These variables are useful for configuring packages
|
||||
@@ -290,7 +293,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
"""
|
||||
return Var(self._ctx, self.cli_vars)
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def env_var(self, var: str, default: Optional[str] = None) -> str:
|
||||
"""The env_var() function. Return the environment variable named 'var'.
|
||||
If there is no such environment variable set, return the default.
|
||||
@@ -318,7 +321,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
|
||||
if os.environ.get("DBT_MACRO_DEBUGGING"):
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def debug():
|
||||
"""Enter a debugger at this line in the compiled jinja code."""
|
||||
@@ -357,7 +360,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
"""
|
||||
raise MacroReturn(data)
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def fromjson(string: str, default: Any = None) -> Any:
|
||||
"""The `fromjson` context method can be used to deserialize a json
|
||||
@@ -378,7 +381,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
except ValueError:
|
||||
return default
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def tojson(value: Any, default: Any = None, sort_keys: bool = False) -> Any:
|
||||
"""The `tojson` context method can be used to serialize a Python
|
||||
@@ -401,7 +404,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
except ValueError:
|
||||
return default
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def fromyaml(value: str, default: Any = None) -> Any:
|
||||
"""The fromyaml context method can be used to deserialize a yaml string
|
||||
@@ -432,7 +435,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
|
||||
# safe_dump defaults to sort_keys=True, but we act like json.dumps (the
|
||||
# opposite)
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def toyaml(
|
||||
value: Any, default: Optional[str] = None, sort_keys: bool = False
|
||||
@@ -477,7 +480,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
except TypeError:
|
||||
return default
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def set_strict(value: Iterable[Any]) -> Set[Any]:
|
||||
"""The `set_strict` context method can be used to convert any iterable
|
||||
@@ -519,7 +522,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
except TypeError:
|
||||
return default
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def zip_strict(*args: Iterable[Any]) -> Iterable[Any]:
|
||||
"""The `zip_strict` context method can be used to used to return
|
||||
@@ -541,7 +544,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
except TypeError as e:
|
||||
raise ZipStrictWrongTypeError(e)
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def log(msg: str, info: bool = False) -> str:
|
||||
"""Logs a line to either the log file or stdout.
|
||||
@@ -562,7 +565,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
fire_event(JinjaLogDebug(msg=msg, node_info=get_node_info()))
|
||||
return ""
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def run_started_at(self) -> Optional[datetime.datetime]:
|
||||
"""`run_started_at` outputs the timestamp that this run started, e.g.
|
||||
`2017-04-21 01:23:45.678`. The `run_started_at` variable is a Python
|
||||
@@ -590,19 +593,19 @@ class BaseContext(metaclass=ContextMeta):
|
||||
else:
|
||||
return None
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def invocation_id(self) -> Optional[str]:
|
||||
"""invocation_id outputs a UUID generated for this dbt run (useful for
|
||||
auditing)
|
||||
"""
|
||||
return get_invocation_id()
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def thread_id(self) -> str:
|
||||
"""thread_id outputs an ID for the current thread (useful for auditing)"""
|
||||
return threading.current_thread().name
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def modules(self) -> Dict[str, Any]:
|
||||
"""The `modules` variable in the Jinja context contains useful Python
|
||||
modules for operating on data.
|
||||
@@ -627,7 +630,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
""" # noqa
|
||||
return get_context_modules()
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def flags(self) -> Any:
|
||||
"""The `flags` variable contains true/false values for flags provided
|
||||
on the command line.
|
||||
@@ -644,7 +647,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
"""
|
||||
return flags_module.get_flag_obj()
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def print(msg: str) -> str:
|
||||
"""Prints a line to stdout.
|
||||
@@ -662,7 +665,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
print(msg)
|
||||
return ""
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def diff_of_two_dicts(
|
||||
dict_a: Dict[str, List[str]], dict_b: Dict[str, List[str]]
|
||||
@@ -691,7 +694,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
dict_diff.update({k: dict_a[k]})
|
||||
return dict_diff
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def local_md5(value: str) -> str:
|
||||
"""Calculates an MD5 hash of the given string.
|
||||
|
||||
@@ -19,7 +19,7 @@ class ConfiguredContext(TargetContext):
|
||||
super().__init__(config.to_target_dict(), config.cli_vars)
|
||||
self.config = config
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def project_name(self) -> str:
|
||||
return self.config.project_name
|
||||
|
||||
@@ -80,11 +80,11 @@ class SchemaYamlContext(ConfiguredContext):
|
||||
self._project_name = project_name
|
||||
self.schema_yaml_vars = schema_yaml_vars
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def var(self) -> ConfiguredVar:
|
||||
return ConfiguredVar(self._ctx, self.config, self._project_name)
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def env_var(self, var: str, default: Optional[str] = None) -> str:
|
||||
return_value = None
|
||||
if var.startswith(SECRET_ENV_PREFIX):
|
||||
@@ -113,7 +113,7 @@ class MacroResolvingContext(ConfiguredContext):
|
||||
def __init__(self, config):
|
||||
super().__init__(config)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def var(self) -> ConfiguredVar:
|
||||
return ConfiguredVar(self._ctx, self.config, self.config.project_name)
|
||||
|
||||
|
||||
@@ -45,6 +45,10 @@ class UnrenderedConfig(ConfigSource):
|
||||
model_configs = unrendered.get("tests")
|
||||
elif resource_type == NodeType.Metric:
|
||||
model_configs = unrendered.get("metrics")
|
||||
elif resource_type == NodeType.SemanticModel:
|
||||
model_configs = unrendered.get("semantic_models")
|
||||
elif resource_type == NodeType.SavedQuery:
|
||||
model_configs = unrendered.get("saved_queries")
|
||||
elif resource_type == NodeType.Exposure:
|
||||
model_configs = unrendered.get("exposures")
|
||||
else:
|
||||
@@ -70,6 +74,10 @@ class RenderedConfig(ConfigSource):
|
||||
model_configs = self.project.tests
|
||||
elif resource_type == NodeType.Metric:
|
||||
model_configs = self.project.metrics
|
||||
elif resource_type == NodeType.SemanticModel:
|
||||
model_configs = self.project.semantic_models
|
||||
elif resource_type == NodeType.SavedQuery:
|
||||
model_configs = self.project.saved_queries
|
||||
elif resource_type == NodeType.Exposure:
|
||||
model_configs = self.project.exposures
|
||||
else:
|
||||
@@ -189,9 +197,21 @@ class ContextConfigGenerator(BaseContextConfigGenerator[C]):
|
||||
|
||||
def _update_from_config(self, result: C, partial: Dict[str, Any], validate: bool = False) -> C:
|
||||
translated = self._active_project.credentials.translate_aliases(partial)
|
||||
return result.update_from(
|
||||
translated = self.translate_hook_names(translated)
|
||||
updated = result.update_from(
|
||||
translated, self._active_project.credentials.type, validate=validate
|
||||
)
|
||||
return updated
|
||||
|
||||
def translate_hook_names(self, project_dict):
|
||||
# This is a kind of kludge because the fix for #6411 specifically allowed misspelling
|
||||
# the hook field names in dbt_project.yml, which only ever worked because we didn't
|
||||
# run validate on the dbt_project configs.
|
||||
if "pre_hook" in project_dict:
|
||||
project_dict["pre-hook"] = project_dict.pop("pre_hook")
|
||||
if "post_hook" in project_dict:
|
||||
project_dict["post-hook"] = project_dict.pop("post_hook")
|
||||
return project_dict
|
||||
|
||||
def calculate_node_config_dict(
|
||||
self,
|
||||
|
||||
@@ -24,7 +24,7 @@ class DocsRuntimeContext(SchemaYamlContext):
|
||||
self.node = node
|
||||
self.manifest = manifest
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def doc(self, *args: str) -> str:
|
||||
"""The `doc` function is used to reference docs blocks in schema.yml
|
||||
files. It is analogous to the `ref` function. For more information,
|
||||
|
||||
@@ -2,7 +2,6 @@ import functools
|
||||
from typing import NoReturn
|
||||
|
||||
from dbt.events.functions import warn_or_error
|
||||
from dbt.events.helpers import env_secrets, scrub_secrets
|
||||
from dbt.events.types import JinjaLogWarning
|
||||
|
||||
from dbt.exceptions import (
|
||||
@@ -26,6 +25,8 @@ from dbt.exceptions import (
|
||||
ContractError,
|
||||
ColumnTypeMissingError,
|
||||
FailFastError,
|
||||
scrub_secrets,
|
||||
env_secrets,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -67,7 +67,7 @@ class ManifestContext(ConfiguredContext):
|
||||
dct.update(self.namespace)
|
||||
return dct
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def context_macro_stack(self):
|
||||
return self.macro_stack
|
||||
|
||||
|
||||
@@ -618,7 +618,7 @@ class RuntimeMetricResolver(BaseMetricResolver):
|
||||
target_package=target_package,
|
||||
)
|
||||
|
||||
return ResolvedMetricReference(target_metric, self.manifest, self.Relation)
|
||||
return ResolvedMetricReference(target_metric, self.manifest)
|
||||
|
||||
|
||||
# `var` implementations.
|
||||
@@ -754,19 +754,19 @@ class ProviderContext(ManifestContext):
|
||||
self.model,
|
||||
)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def dbt_metadata_envs(self) -> Dict[str, str]:
|
||||
return get_metadata_vars()
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def invocation_args_dict(self):
|
||||
return args_to_dict(self.config.args)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def _sql_results(self) -> Dict[str, Optional[AttrDict]]:
|
||||
return self.sql_results
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def load_result(self, name: str) -> Optional[AttrDict]:
|
||||
if name in self.sql_results:
|
||||
# handle the special case of "main" macro
|
||||
@@ -787,7 +787,7 @@ class ProviderContext(ManifestContext):
|
||||
# Handle trying to load a result that was never stored
|
||||
return None
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def store_result(
|
||||
self, name: str, response: Any, agate_table: Optional[agate.Table] = None
|
||||
) -> str:
|
||||
@@ -803,7 +803,7 @@ class ProviderContext(ManifestContext):
|
||||
)
|
||||
return ""
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def store_raw_result(
|
||||
self,
|
||||
name: str,
|
||||
@@ -815,7 +815,7 @@ class ProviderContext(ManifestContext):
|
||||
response = AdapterResponse(_message=message, code=code, rows_affected=rows_affected)
|
||||
return self.store_result(name, response, agate_table)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def validation(self):
|
||||
def validate_any(*args) -> Callable[[T], None]:
|
||||
def inner(value: T) -> None:
|
||||
@@ -836,7 +836,7 @@ class ProviderContext(ManifestContext):
|
||||
}
|
||||
)
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def write(self, payload: str) -> str:
|
||||
# macros/source defs aren't 'writeable'.
|
||||
if isinstance(self.model, (Macro, SourceDefinition)):
|
||||
@@ -845,11 +845,11 @@ class ProviderContext(ManifestContext):
|
||||
self.model.write_node(self.config.project_root, self.model.build_path, payload)
|
||||
return ""
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def render(self, string: str) -> str:
|
||||
return get_rendered(string, self._ctx, self.model)
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def try_or_compiler_error(
|
||||
self, message_if_exception: str, func: Callable, *args, **kwargs
|
||||
) -> Any:
|
||||
@@ -858,12 +858,22 @@ class ProviderContext(ManifestContext):
|
||||
except Exception:
|
||||
raise CompilationError(message_if_exception, self.model)
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def load_agate_table(self) -> agate.Table:
|
||||
if not isinstance(self.model, SeedNode):
|
||||
raise LoadAgateTableNotSeedError(self.model.resource_type, node=self.model)
|
||||
assert self.model.root_path
|
||||
path = os.path.join(self.model.root_path, self.model.original_file_path)
|
||||
|
||||
# include package_path for seeds defined in packages
|
||||
package_path = (
|
||||
os.path.join(self.config.packages_install_path, self.model.package_name)
|
||||
if self.model.package_name != self.config.project_name
|
||||
else "."
|
||||
)
|
||||
path = os.path.join(self.config.project_root, package_path, self.model.original_file_path)
|
||||
if not os.path.exists(path):
|
||||
assert self.model.root_path
|
||||
path = os.path.join(self.model.root_path, self.model.original_file_path)
|
||||
|
||||
column_types = self.model.config.column_types
|
||||
delimiter = self.model.config.delimiter
|
||||
try:
|
||||
@@ -873,7 +883,7 @@ class ProviderContext(ManifestContext):
|
||||
table.original_abspath = os.path.abspath(path)
|
||||
return table
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def ref(self) -> Callable:
|
||||
"""The most important function in dbt is `ref()`; it's impossible to
|
||||
build even moderately complex models without it. `ref()` is how you
|
||||
@@ -914,11 +924,11 @@ class ProviderContext(ManifestContext):
|
||||
"""
|
||||
return self.provider.ref(self.db_wrapper, self.model, self.config, self.manifest)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def source(self) -> Callable:
|
||||
return self.provider.source(self.db_wrapper, self.model, self.config, self.manifest)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def metric(self) -> Callable:
|
||||
return self.provider.metric(self.db_wrapper, self.model, self.config, self.manifest)
|
||||
|
||||
@@ -979,7 +989,7 @@ class ProviderContext(ManifestContext):
|
||||
""" # noqa
|
||||
return self.provider.Config(self.model, self.context_config)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def execute(self) -> bool:
|
||||
"""`execute` is a Jinja variable that returns True when dbt is in
|
||||
"execute" mode.
|
||||
@@ -1040,7 +1050,7 @@ class ProviderContext(ManifestContext):
|
||||
""" # noqa
|
||||
return self.provider.execute
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def exceptions(self) -> Dict[str, Any]:
|
||||
"""The exceptions namespace can be used to raise warnings and errors in
|
||||
dbt userspace.
|
||||
@@ -1078,15 +1088,15 @@ class ProviderContext(ManifestContext):
|
||||
""" # noqa
|
||||
return wrapped_exports(self.model)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def database(self) -> str:
|
||||
return self.config.credentials.database
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def schema(self) -> str:
|
||||
return self.config.credentials.schema
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def var(self) -> ModelConfiguredVar:
|
||||
return self.provider.Var(
|
||||
context=self._ctx,
|
||||
@@ -1103,22 +1113,22 @@ class ProviderContext(ManifestContext):
|
||||
"""
|
||||
return self.db_wrapper
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def api(self) -> Dict[str, Any]:
|
||||
return {
|
||||
"Relation": self.db_wrapper.Relation,
|
||||
"Column": self.adapter.Column,
|
||||
}
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def column(self) -> Type[Column]:
|
||||
return self.adapter.Column
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def env(self) -> Dict[str, Any]:
|
||||
return self.target
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def graph(self) -> Dict[str, Any]:
|
||||
"""The `graph` context variable contains information about the nodes in
|
||||
your dbt project. Models, sources, tests, and snapshots are all
|
||||
@@ -1227,30 +1237,42 @@ class ProviderContext(ManifestContext):
|
||||
|
||||
@contextproperty("model")
|
||||
def ctx_model(self) -> Dict[str, Any]:
|
||||
ret = self.model.to_dict(omit_none=True)
|
||||
model_dct = self.model.to_dict(omit_none=True)
|
||||
# Maintain direct use of compiled_sql
|
||||
# TODO add depreciation logic[CT-934]
|
||||
if "compiled_code" in ret:
|
||||
ret["compiled_sql"] = ret["compiled_code"]
|
||||
return ret
|
||||
if "compiled_code" in model_dct:
|
||||
model_dct["compiled_sql"] = model_dct["compiled_code"]
|
||||
|
||||
@contextproperty
|
||||
if (
|
||||
hasattr(self.model, "contract")
|
||||
and self.model.contract.alias_types is True
|
||||
and "columns" in model_dct
|
||||
):
|
||||
for column in model_dct["columns"].values():
|
||||
if "data_type" in column:
|
||||
orig_data_type = column["data_type"]
|
||||
# translate data_type to value in Column.TYPE_LABELS
|
||||
new_data_type = self.adapter.Column.translate_type(orig_data_type)
|
||||
column["data_type"] = new_data_type
|
||||
return model_dct
|
||||
|
||||
@contextproperty()
|
||||
def pre_hooks(self) -> Optional[List[Dict[str, Any]]]:
|
||||
return None
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def post_hooks(self) -> Optional[List[Dict[str, Any]]]:
|
||||
return None
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def sql(self) -> Optional[str]:
|
||||
return None
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def sql_now(self) -> str:
|
||||
return self.adapter.date_function()
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def adapter_macro(self, name: str, *args, **kwargs):
|
||||
"""This was deprecated in v0.18 in favor of adapter.dispatch"""
|
||||
msg = (
|
||||
@@ -1262,7 +1284,7 @@ class ProviderContext(ManifestContext):
|
||||
)
|
||||
raise CompilationError(msg)
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def env_var(self, var: str, default: Optional[str] = None) -> str:
|
||||
"""The env_var() function. Return the environment variable named 'var'.
|
||||
If there is no such environment variable set, return the default.
|
||||
@@ -1306,7 +1328,7 @@ class ProviderContext(ManifestContext):
|
||||
else:
|
||||
raise EnvVarMissingError(var)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def selected_resources(self) -> List[str]:
|
||||
"""The `selected_resources` variable contains a list of the resources
|
||||
selected based on the parameters provided to the dbt command.
|
||||
@@ -1315,7 +1337,7 @@ class ProviderContext(ManifestContext):
|
||||
"""
|
||||
return selected_resources.SELECTED_RESOURCES
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def submit_python_job(self, parsed_model: Dict, compiled_code: str) -> AdapterResponse:
|
||||
# Check macro_stack and that the unique id is for a materialization macro
|
||||
if not (
|
||||
@@ -1358,7 +1380,7 @@ class MacroContext(ProviderContext):
|
||||
class ModelContext(ProviderContext):
|
||||
model: ManifestNode
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def pre_hooks(self) -> List[Dict[str, Any]]:
|
||||
if self.model.resource_type in [NodeType.Source, NodeType.Test]:
|
||||
return []
|
||||
@@ -1367,7 +1389,7 @@ class ModelContext(ProviderContext):
|
||||
h.to_dict(omit_none=True) for h in self.model.config.pre_hook # type: ignore[union-attr] # noqa
|
||||
]
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def post_hooks(self) -> List[Dict[str, Any]]:
|
||||
if self.model.resource_type in [NodeType.Source, NodeType.Test]:
|
||||
return []
|
||||
@@ -1376,7 +1398,7 @@ class ModelContext(ProviderContext):
|
||||
h.to_dict(omit_none=True) for h in self.model.config.post_hook # type: ignore[union-attr] # noqa
|
||||
]
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def sql(self) -> Optional[str]:
|
||||
# only doing this in sql model for backward compatible
|
||||
if self.model.language == ModelLanguage.sql: # type: ignore[union-attr]
|
||||
@@ -1393,7 +1415,7 @@ class ModelContext(ProviderContext):
|
||||
else:
|
||||
return None
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def compiled_code(self) -> Optional[str]:
|
||||
if getattr(self.model, "defer_relation", None):
|
||||
# TODO https://github.com/dbt-labs/dbt-core/issues/7976
|
||||
@@ -1404,15 +1426,15 @@ class ModelContext(ProviderContext):
|
||||
else:
|
||||
return None
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def database(self) -> str:
|
||||
return getattr(self.model, "database", self.config.credentials.database)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def schema(self) -> str:
|
||||
return getattr(self.model, "schema", self.config.credentials.schema)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def this(self) -> Optional[RelationProxy]:
|
||||
"""`this` makes available schema information about the currently
|
||||
executing model. It's is useful in any context in which you need to
|
||||
@@ -1447,7 +1469,7 @@ class ModelContext(ProviderContext):
|
||||
return None
|
||||
return self.db_wrapper.Relation.create_from(self.config, self.model)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def defer_relation(self) -> Optional[RelationProxy]:
|
||||
"""
|
||||
For commands which add information about this node's corresponding
|
||||
@@ -1661,7 +1683,7 @@ class TestContext(ProviderContext):
|
||||
)
|
||||
self.namespace = macro_namespace
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def env_var(self, var: str, default: Optional[str] = None) -> str:
|
||||
return_value = None
|
||||
if var.startswith(SECRET_ENV_PREFIX):
|
||||
|
||||
@@ -14,7 +14,7 @@ class SecretContext(BaseContext):
|
||||
"""This context is used in profiles.yml + packages.yml. It can render secret
|
||||
env vars that aren't usable elsewhere"""
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def env_var(self, var: str, default: Optional[str] = None) -> str:
|
||||
"""The env_var() function. Return the environment variable named 'var'.
|
||||
If there is no such environment variable set, return the default.
|
||||
|
||||
@@ -9,7 +9,7 @@ class TargetContext(BaseContext):
|
||||
super().__init__(cli_vars=cli_vars)
|
||||
self.target_dict = target_dict
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def target(self) -> Dict[str, Any]:
|
||||
"""`target` contains information about your connection to the warehouse
|
||||
(specified in profiles.yml). Some configs are shared between all
|
||||
|
||||
@@ -16,26 +16,21 @@ from dbt.utils import translate_aliases, md5
|
||||
from dbt.events.functions import fire_event
|
||||
from dbt.events.types import NewConnectionOpening
|
||||
from dbt.events.contextvars import get_node_info
|
||||
from typing_extensions import Protocol
|
||||
from typing_extensions import Protocol, Annotated
|
||||
from dbt.dataclass_schema import (
|
||||
dbtClassMixin,
|
||||
StrEnum,
|
||||
ExtensibleDbtClassMixin,
|
||||
HyphenatedDbtClassMixin,
|
||||
ValidatedStringMixin,
|
||||
register_pattern,
|
||||
)
|
||||
from dbt.contracts.util import Replaceable
|
||||
from mashumaro.jsonschema.annotations import Pattern
|
||||
|
||||
|
||||
class Identifier(ValidatedStringMixin):
|
||||
ValidationRegex = r"^[A-Za-z_][A-Za-z0-9_]+$"
|
||||
|
||||
|
||||
# we need register_pattern for jsonschema validation
|
||||
register_pattern(Identifier, r"^[A-Za-z_][A-Za-z0-9_]+$")
|
||||
|
||||
|
||||
@dataclass
|
||||
class AdapterResponse(dbtClassMixin):
|
||||
_message: str
|
||||
@@ -55,7 +50,8 @@ class ConnectionState(StrEnum):
|
||||
|
||||
@dataclass(init=False)
|
||||
class Connection(ExtensibleDbtClassMixin, Replaceable):
|
||||
type: Identifier
|
||||
# Annotated is used by mashumaro for jsonschema generation
|
||||
type: Annotated[Identifier, Pattern(r"^[A-Za-z_][A-Za-z0-9_]+$")]
|
||||
name: Optional[str] = None
|
||||
state: ConnectionState = ConnectionState.INIT
|
||||
transaction_open: bool = False
|
||||
@@ -108,7 +104,7 @@ class LazyHandle:
|
||||
connection, updating the handle on the Connection.
|
||||
"""
|
||||
|
||||
def __init__(self, opener: Callable[[Connection], Connection]):
|
||||
def __init__(self, opener: Callable[[Connection], Connection]) -> None:
|
||||
self.opener = opener
|
||||
|
||||
def resolve(self, connection: Connection) -> Connection:
|
||||
@@ -161,6 +157,7 @@ class Credentials(ExtensibleDbtClassMixin, Replaceable, metaclass=abc.ABCMeta):
|
||||
@classmethod
|
||||
def __pre_deserialize__(cls, data):
|
||||
data = super().__pre_deserialize__(data)
|
||||
# Need to fixup dbname => database, pass => password
|
||||
data = cls.translate_aliases(data)
|
||||
return data
|
||||
|
||||
@@ -220,10 +217,10 @@ DEFAULT_QUERY_COMMENT = """
|
||||
|
||||
|
||||
@dataclass
|
||||
class QueryComment(HyphenatedDbtClassMixin):
|
||||
class QueryComment(dbtClassMixin):
|
||||
comment: str = DEFAULT_QUERY_COMMENT
|
||||
append: bool = False
|
||||
job_label: bool = False
|
||||
job_label: bool = field(default=False, metadata={"alias": "job-label"})
|
||||
|
||||
|
||||
class AdapterRequiredConfig(HasCredentials, Protocol):
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user