Mirror of https://github.com/dbt-labs/dbt-core
Synced 2025-12-17 19:31:34 +00:00

Compare commits: jerco/upda...update-aga

95 Commits
Commit SHA1s (author and date columns were not captured):

260babbaf3
a0e13561b1
7eedfcd274
da779ac77c
adfa3226e3
e5e1a272ff
d8e8a78368
7ae3de1fa0
72898c7211
fc1a14a0e3
f063e4e01c
07372db906
48d04e8141
6234267242
1afbb87e99
d18a74ddb7
4d3c6d9c7c
10f9724827
582faa129e
4ec87a01e0
ff98685dd6
424f3d218a
661623f9f7
49397b4d7b
5e3e0f6b78
c24932235e
628024a2fd
0553fd817c
7ad971f720
f485c13035
c30b691164
d088d4493e
770f804325
37a29073de
17cd145f09
ac539fd5cf
048553ddc3
dfe6b71fd9
18ee93ca3a
cb4bc2d6e9
b0451806ef
b514e4c249
8350dfead3
34e6edbb13
27be92903e
9388030182
b7aee3f5a4
83ff38ab24
6603a44151
e69d4e7f14
506f65e880
41bb52762b
8c98ef3e70
44d1e73b4f
53794fbaba
556b4043e9
424c636533
f63709260e
6890757b68
d8581d7130
01192021ed
d1d0d4ff86
991618dfc1
1af489b1cd
a433c31d6e
5814928e38
6130a6e1d0
7872f6a670
f230e418aa
518eb73f88
5b6d21d7da
410506f448
3cb44d37c0
f977ed7471
3f5617b569
fe9c875d32
23b16ad6d2
fdeccfaf24
fecde23da5
b1d931337e
39542336b8
799588cada
f392add4b8
49560bf2a2
44b3ed5ae9
6235145641
ff5cb7ba51
1e2b9ae962
8cab58d248
0d645c227f
fb6c349677
eeb057085c
121371f4a4
a32713198b
a1b067c683
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.7.0a1
+current_version = 1.7.0b2
 parse = (?P<major>[\d]+) # major version number
     \.(?P<minor>[\d]+) # minor version number
     \.(?P<patch>[\d]+) # patch version number
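For reference, the `parse` value above is a verbose-mode regex. A minimal Python sketch (not part of the diff) of how it splits a version string; prerelease handling ("b2") lives in parts of the config not shown here:

```python
import re

# Mirror of the bumpversion `parse` pattern above (major.minor.patch only).
PARSE = re.compile(
    r"""(?P<major>[\d]+)    # major version number
        \.(?P<minor>[\d]+)  # minor version number
        \.(?P<patch>[\d]+)  # patch version number
    """,
    re.VERBOSE,
)

# .match() anchors at the start, so the numeric core of "1.7.0b2" is captured.
m = PARSE.match("1.7.0b2")
assert m is not None
print(m.groupdict())  # {'major': '1', 'minor': '7', 'patch': '0'}
```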
70 .changes/1.7.0-b1.md Normal file
@@ -0,0 +1,70 @@
## dbt-core 1.7.0-b1 - August 17, 2023

### Breaking Changes

- Removed the FirstRunResultError and AfterFirstRunResultError event types, using the existing RunResultError in their place. ([#7963](https://github.com/dbt-labs/dbt-core/issues/7963))

### Features

- Enable re-population of metadata vars post-environment change during programmatic invocation ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
- Added support to configure a delimiter for a seed file, defaults to comma ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
- Allow specification of `create_metric: true` on measures ([#8125](https://github.com/dbt-labs/dbt-core/issues/8125))

### Fixes

- Copy dir during `dbt deps` if symlink fails ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
- Fixed double-underline ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
- Copy target_schema from config into snapshot node ([#6745](https://github.com/dbt-labs/dbt-core/issues/6745))
- Enable converting deprecation warnings to errors ([#8130](https://github.com/dbt-labs/dbt-core/issues/8130))
- Add status to Parse Inline Error ([#8173](https://github.com/dbt-labs/dbt-core/issues/8173))
- Ensure `warn_error_options` get serialized in `invocation_args_dict` ([#7694](https://github.com/dbt-labs/dbt-core/issues/7694))
- Stop detecting materialization macros based on macro name ([#6231](https://github.com/dbt-labs/dbt-core/issues/6231))
- Update `dbt deps` download retry logic to handle `EOFError` exceptions ([#6653](https://github.com/dbt-labs/dbt-core/issues/6653))
- Improve handling of CTE injection with ephemeral models ([#8213](https://github.com/dbt-labs/dbt-core/issues/8213))
- Fix unbound local variable error in `checked_agg_time_dimension_for_measure` ([#8230](https://github.com/dbt-labs/dbt-core/issues/8230))
- Ensure runtime errors are raised for graph runnable tasks (compile, show, run, etc) ([#8166](https://github.com/dbt-labs/dbt-core/issues/8166))
- Fix retry not working with log-file-max-bytes ([#8297](https://github.com/dbt-labs/dbt-core/issues/8297))
- Detect changes to model access, version, or latest_version in state:modified ([#8189](https://github.com/dbt-labs/dbt-core/issues/8189))
- Add connection status into list of statuses for dbt debug ([#8350](https://github.com/dbt-labs/dbt-core/issues/8350))
- fix fqn-selection for external versioned models ([#8374](https://github.com/dbt-labs/dbt-core/issues/8374))
- Fix: DbtInternalError after model that previously ref'd external model is deleted ([#8375](https://github.com/dbt-labs/dbt-core/issues/8375))
- Fix using list command with path selector and project-dir ([#8385](https://github.com/dbt-labs/dbt-core/issues/8385))
- Remedy performance regression by only writing run_results.json once. ([#8360](https://github.com/dbt-labs/dbt-core/issues/8360))

### Docs

- Corrected spelling of "Partiton" ([dbt-docs/#8100](https://github.com/dbt-labs/dbt-docs/issues/8100))
- Remove static SQL codeblock for metrics ([dbt-docs/#436](https://github.com/dbt-labs/dbt-docs/issues/436))
- fixed comment util.py ([dbt-docs/#None](https://github.com/dbt-labs/dbt-docs/issues/None))
- Display contract and column constraints on the model page ([dbt-docs/#433](https://github.com/dbt-labs/dbt-docs/issues/433))
- Display semantic model details in docs ([dbt-docs/#431](https://github.com/dbt-labs/dbt-docs/issues/431))

### Under the Hood

- Refactor flaky test pp_versioned_models ([#7781](https://github.com/dbt-labs/dbt-core/issues/7781))
- format exception from dbtPlugin.initialize ([#8152](https://github.com/dbt-labs/dbt-core/issues/8152))
- A way to control maxBytes for a single dbt.log file ([#8199](https://github.com/dbt-labs/dbt-core/issues/8199))
- Ref expressions with version can now be processed by the latest version of the high-performance dbt-extractor library. ([#7688](https://github.com/dbt-labs/dbt-core/issues/7688))
- Bump manifest schema version to v11, freeze manifest v10 ([#8333](https://github.com/dbt-labs/dbt-core/issues/8333))
- add tracking for plugin.get_nodes calls ([#8344](https://github.com/dbt-labs/dbt-core/issues/8344))
- add internal flag: --no-partial-parse-file-diff to inform whether to compute a file diff during partial parsing ([#8363](https://github.com/dbt-labs/dbt-core/issues/8363))
- Add return values to a number of functions for mypy ([#8389](https://github.com/dbt-labs/dbt-core/issues/8389))
- Fix mypy warnings for ManifestLoader.load() ([#8401](https://github.com/dbt-labs/dbt-core/issues/8401))
- Use python version 3.10.7 in Docker image. ([#8444](https://github.com/dbt-labs/dbt-core/issues/8444))

### Dependencies

- Bump mypy from 1.3.0 to 1.4.0 ([#7912](https://github.com/dbt-labs/dbt-core/pull/7912))
- Bump mypy from 1.4.0 to 1.4.1 ([#8219](https://github.com/dbt-labs/dbt-core/pull/8219))
- Update pin for click<9 ([#8232](https://github.com/dbt-labs/dbt-core/pull/8232))
- Add upper bound to sqlparse pin of <0.5 ([#8236](https://github.com/dbt-labs/dbt-core/pull/8236))
- Support dbt-semantic-interfaces 0.2.0 ([#8250](https://github.com/dbt-labs/dbt-core/pull/8250))

### Contributors
- [@anjutiwari](https://github.com/anjutiwari) ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
- [@d-kaneshiro](https://github.com/d-kaneshiro) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
- [@gem7318](https://github.com/gem7318) ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
- [@lllong33](https://github.com/lllong33) ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
- [@marcodamore](https://github.com/marcodamore) ([#436](https://github.com/dbt-labs/dbt-core/issues/436))
- [@pgoslatara](https://github.com/pgoslatara) ([#8100](https://github.com/dbt-labs/dbt-core/issues/8100))
- [@ramonvermeulen](https://github.com/ramonvermeulen) ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
29 .changes/1.7.0-b2.md Normal file
@@ -0,0 +1,29 @@
## dbt-core 1.7.0-b2 - September 01, 2023

### Features

- Add node attributes related to compilation to run_results.json ([#7519](https://github.com/dbt-labs/dbt-core/issues/7519))
- Support configuration of semantic models with the addition of enable/disable and group enablement. ([#7968](https://github.com/dbt-labs/dbt-core/issues/7968))

### Fixes

- Add support for swapping materialized views with tables/views and vice versa ([#8449](https://github.com/dbt-labs/dbt-core/issues/8449))
- Turn breaking changes to contracted models into warnings for unversioned models ([#8384](https://github.com/dbt-labs/dbt-core/issues/8384), [#8282](https://github.com/dbt-labs/dbt-core/issues/8282))
- Ensure parsing does not break when `window_groupings` is not specified for `non_additive_dimension` ([#8453](https://github.com/dbt-labs/dbt-core/issues/8453))
- fix ambiguous reference error for tests and versions when model name is duplicated across packages ([#8327](https://github.com/dbt-labs/dbt-core/issues/8327), [#8493](https://github.com/dbt-labs/dbt-core/issues/8493))
- Fix "Internal Error: Expected node <unique-id> not found in manifest" when depends_on set on ModelNodeArgs ([#8506](https://github.com/dbt-labs/dbt-core/issues/8506))
- Fix snapshot success message ([#7583](https://github.com/dbt-labs/dbt-core/issues/7583))

### Docs

- Fix newline escapes and improve formatting in docker README ([dbt-docs/#8211](https://github.com/dbt-labs/dbt-docs/issues/8211))

### Under the Hood

- Switch from hologram to mashumaro jsonschema ([#8426](https://github.com/dbt-labs/dbt-core/issues/8426))
- Re-organize jinja macros: relation-specific in /macros/adapters/relations/<relation>, relation agnostic in /macros/relations ([#8449](https://github.com/dbt-labs/dbt-core/issues/8449))
- Update typing to meet mypy standards ([#8396](https://github.com/dbt-labs/dbt-core/issues/8396))
- Mypy errors - adapters/factory.py ([#8387](https://github.com/dbt-labs/dbt-core/issues/8387))

### Contributors
- [@jamezrin](https://github.com/jamezrin) ([#8211](https://github.com/dbt-labs/dbt-core/issues/8211))
7 .changes/1.7.0/Breaking Changes-20230725-171359.yaml Normal file
@@ -0,0 +1,7 @@
kind: Breaking Changes
body: Removed the FirstRunResultError and AfterFirstRunResultError event types, using
  the existing RunResultError in their place.
time: 2023-07-25T17:13:59.441682-04:00
custom:
  Author: peterallenwebb
  Issue: "7963"

6 .changes/1.7.0/Dependencies-20230621-005752.yaml Normal file
@@ -0,0 +1,6 @@
kind: "Dependencies"
body: "Bump mypy from 1.3.0 to 1.4.0"
time: 2023-06-21T00:57:52.00000Z
custom:
  Author: dependabot[bot]
  PR: 7912

6 .changes/1.7.0/Dependencies-20230726-201740.yaml Normal file
@@ -0,0 +1,6 @@
kind: "Dependencies"
body: "Bump mypy from 1.4.0 to 1.4.1"
time: 2023-07-26T20:17:40.00000Z
custom:
  Author: dependabot[bot]
  PR: 8219

6 .changes/1.7.0/Dependencies-20230727-145703.yaml Normal file
@@ -0,0 +1,6 @@
kind: Dependencies
body: Update pin for click<9
time: 2023-07-27T14:57:03.180458-05:00
custom:
  Author: emmyoop
  PR: "8232"

6 .changes/1.7.0/Dependencies-20230727-145726.yaml Normal file
@@ -0,0 +1,6 @@
kind: Dependencies
body: Add upper bound to sqlparse pin of <0.5
time: 2023-07-27T14:57:26.40416-05:00
custom:
  Author: emmyoop
  PR: "8236"

6 .changes/1.7.0/Dependencies-20230728-135227.yaml Normal file
@@ -0,0 +1,6 @@
kind: Dependencies
body: Support dbt-semantic-interfaces 0.2.0
time: 2023-07-28T13:52:27.207241-07:00
custom:
  Author: QMalcolm
  PR: "8250"

6 .changes/1.7.0/Docs-20230715-200907.yaml Normal file
@@ -0,0 +1,6 @@
kind: Docs
body: Corrected spelling of "Partiton"
time: 2023-07-15T20:09:07.057361092+02:00
custom:
  Author: pgoslatara
  Issue: "8100"

6 .changes/1.7.0/Docs-20230727-170900.yaml Normal file
@@ -0,0 +1,6 @@
kind: Docs
body: fixed comment util.py
time: 2023-07-27T17:09:00.089237+09:00
custom:
  Author: d-kaneshiro
  Issue: None

6 .changes/1.7.0/Docs-20230728-193438.yaml Normal file
@@ -0,0 +1,6 @@
kind: Docs
body: Fix newline escapes and improve formatting in docker README
time: 2023-07-28T19:34:38.351042747+02:00
custom:
  Author: jamezrin
  Issue: "8211"

6 .changes/1.7.0/Docs-20230804-131815.yaml Normal file
@@ -0,0 +1,6 @@
kind: Docs
body: Display contract and column constraints on the model page
time: 2023-08-04T13:18:15.627005-05:00
custom:
  Author: emmyoop
  Issue: "433"

6 .changes/1.7.0/Docs-20230807-152548.yaml Normal file
@@ -0,0 +1,6 @@
kind: Docs
body: Display semantic model details in docs
time: 2023-08-07T15:25:48.711627-05:00
custom:
  Author: emmyoop
  Issue: "431"

7 .changes/1.7.0/Features-20230702-122813.yaml Normal file
@@ -0,0 +1,7 @@
kind: Features
body: Enable re-population of metadata vars post-environment change during programmatic
  invocation
time: 2023-07-02T12:28:13.416305-04:00
custom:
  Author: gem7318
  Issue: "8010"

6 .changes/1.7.0/Features-20230714-202445.yaml Normal file
@@ -0,0 +1,6 @@
kind: Features
body: Added support to configure a delimiter for a seed file, defaults to comma
time: 2023-07-14T20:24:45.513847165+02:00
custom:
  Author: ramonvermeulen
  Issue: "3990"

6 .changes/1.7.0/Features-20230803-151824.yaml Normal file
@@ -0,0 +1,6 @@
kind: Features
body: 'Allow specification of `create_metric: true` on measures'
time: 2023-08-03T15:18:24.351003-07:00
custom:
  Author: QMalcolm
  Issue: "8125"

6 .changes/1.7.0/Features-20230821-103357.yaml Normal file
@@ -0,0 +1,6 @@
kind: Features
body: Add node attributes related to compilation to run_results.json
time: 2023-08-21T10:33:57.200883-04:00
custom:
  Author: peterallenwebb
  Issue: "7519"

6 .changes/1.7.0/Features-20230828-092100.yaml Normal file
@@ -0,0 +1,6 @@
kind: Features
body: Support configuration of semantic models with the addition of enable/disable and group enablement.
time: 2023-08-28T09:21:00.551633-05:00
custom:
  Author: emmyoop
  Issue: "7968"
6 .changes/1.7.0/Fixes-20230424-210734.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Copy dir during `dbt deps` if symlink fails
time: 2023-04-24T21:07:34.336797+05:30
custom:
  Author: anjutiwari
  Issue: "7428 8223"

6 .changes/1.7.0/Fixes-20230625-142731.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Fixed double-underline
time: 2023-06-25T14:27:31.231253719+08:00
custom:
  Author: lllong33
  Issue: "5301"

6 .changes/1.7.0/Fixes-20230717-160652.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Copy target_schema from config into snapshot node
time: 2023-07-17T16:06:52.957724-04:00
custom:
  Author: gshank
  Issue: "6745"

6 .changes/1.7.0/Fixes-20230720-122723.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Add status to Parse Inline Error
time: 2023-07-20T12:27:23.085084-07:00
custom:
  Author: ChenyuLInx
  Issue: "8173"

6 .changes/1.7.0/Fixes-20230720-161513.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Ensure `warn_error_options` get serialized in `invocation_args_dict`
time: 2023-07-20T16:15:13.761813-07:00
custom:
  Author: QMalcolm
  Issue: "7694"

6 .changes/1.7.0/Fixes-20230720-170112.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Stop detecting materialization macros based on macro name
time: 2023-07-20T17:01:12.496238-07:00
custom:
  Author: QMalcolm
  Issue: "6231"

6 .changes/1.7.0/Fixes-20230720-172422.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Update `dbt deps` download retry logic to handle `EOFError` exceptions
time: 2023-07-20T17:24:22.969951-07:00
custom:
  Author: QMalcolm
  Issue: "6653"

6 .changes/1.7.0/Fixes-20230726-104448.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Improve handling of CTE injection with ephemeral models
time: 2023-07-26T10:44:48.888451-04:00
custom:
  Author: gshank
  Issue: "8213"

6 .changes/1.7.0/Fixes-20230727-125830.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Fix unbound local variable error in `checked_agg_time_dimension_for_measure`
time: 2023-07-27T12:58:30.673803-07:00
custom:
  Author: QMalcolm
  Issue: "8230"

7 .changes/1.7.0/Fixes-20230728-115620.yaml Normal file
@@ -0,0 +1,7 @@
kind: Fixes
body: Ensure runtime errors are raised for graph runnable tasks (compile, show, run,
  etc)
time: 2023-07-28T11:56:20.863718-04:00
custom:
  Author: michelleark
  Issue: "8166"

6 .changes/1.7.0/Fixes-20230802-141556.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Fix retry not working with log-file-max-bytes
time: 2023-08-02T14:15:56.306027-07:00
custom:
  Author: ChenyuLInx
  Issue: "8297"

6 .changes/1.7.0/Fixes-20230806-222319.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Detect changes to model access, version, or latest_version in state:modified
time: 2023-08-06T22:23:19.166334-04:00
custom:
  Author: michelleark
  Issue: "8189"

6 .changes/1.7.0/Fixes-20230810-184859.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Add connection status into list of statuses for dbt debug
time: 2023-08-10T18:48:59.221344+01:00
custom:
  Author: aranke
  Issue: "8350"

6 .changes/1.7.0/Fixes-20230811-204144.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: fix fqn-selection for external versioned models
time: 2023-08-11T20:41:44.725144-04:00
custom:
  Author: michelleark
  Issue: "8374"

7 .changes/1.7.0/Fixes-20230811-212008.yaml Normal file
@@ -0,0 +1,7 @@
kind: Fixes
body: 'Fix: DbtInternalError after model that previously ref''d external model is
  deleted'
time: 2023-08-11T21:20:08.145554-04:00
custom:
  Author: michelleark
  Issue: "8375"

6 .changes/1.7.0/Fixes-20230814-145702.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Fix using list command with path selector and project-dir
time: 2023-08-14T14:57:02.02816-04:00
custom:
  Author: gshank
  Issue: "8385"

6 .changes/1.7.0/Fixes-20230815-104444.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Remedy performance regression by only writing run_results.json once.
time: 2023-08-15T10:44:44.836991-04:00
custom:
  Author: peterallenwebb
  Issue: "8360"

6 .changes/1.7.0/Fixes-20230817-130915.yaml Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Use python version 3.10.7 in Docker image.
time: 2023-08-17T13:09:15.936349-05:00
custom:
  Author: McKnight-42
  Issue: "8444"

6 .changes/1.7.0/Fixes-20230817-185739.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Add support for swapping materialized views with tables/views and vice versa
time: 2023-08-17T18:57:39.01958-04:00
custom:
  Author: mikealfare
  Issue: "8449"

6 .changes/1.7.0/Fixes-20230818-095348.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Ensure parsing does not break when `window_groupings` is not specified for `non_additive_dimension`
time: 2023-08-18T09:53:48.154848-07:00
custom:
  Author: QMalcolm
  Issue: "8453"

6 .changes/1.7.0/Fixes-20230818-103802.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Turn breaking changes to contracted models into warnings for unversioned models
time: 2023-08-18T10:38:02.251286-05:00
custom:
  Author: emmyoop
  Issue: 8384 8282

7 .changes/1.7.0/Fixes-20230824-161024.yaml Normal file
@@ -0,0 +1,7 @@
kind: Fixes
body: fix ambiguous reference error for tests and versions when model name is duplicated across
  packages
time: 2023-08-24T16:10:24.437362-04:00
custom:
  Author: michelleark
  Issue: "8327 8493"

7 .changes/1.7.0/Fixes-20230828-125858.yaml Normal file
@@ -0,0 +1,7 @@
kind: Fixes
body: 'Fix "Internal Error: Expected node <unique-id> not found in manifest" when
  depends_on set on ModelNodeArgs'
time: 2023-08-28T12:58:58.061228-04:00
custom:
  Author: michelleark
  Issue: "8506"

6 .changes/1.7.0/Fixes-20230830-150803.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Fix snapshot success message
time: 2023-08-30T15:08:03.429373-04:00
custom:
  Author: gshank
  Issue: "7583"
6 .changes/1.7.0/Under the Hood-20230718-145428.yaml Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Switch from hologram to mashumaro jsonschema
time: 2023-07-18T14:54:28.41453-04:00
custom:
  Author: gshank
  Issue: "8426"

6 .changes/1.7.0/Under the Hood-20230719-124611.yaml Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Refactor flaky test pp_versioned_models
time: 2023-07-19T12:46:11.972481-04:00
custom:
  Author: gshank
  Issue: "7781"

6 .changes/1.7.0/Under the Hood-20230719-163334.yaml Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: format exception from dbtPlugin.initialize
time: 2023-07-19T16:33:34.586377-04:00
custom:
  Author: michelleark
  Issue: "8152"

6 .changes/1.7.0/Under the Hood-20230724-150654.yaml Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: A way to control maxBytes for a single dbt.log file
time: 2023-07-24T15:06:54.263822-07:00
custom:
  Author: ChenyuLInx
  Issue: "8199"

7 .changes/1.7.0/Under the Hood-20230725-102609.yaml Normal file
@@ -0,0 +1,7 @@
kind: Under the Hood
body: Ref expressions with version can now be processed by the latest version of the
  high-performance dbt-extractor library.
time: 2023-07-25T10:26:09.902878-04:00
custom:
  Author: peterallenwebb
  Issue: "7688"

6 .changes/1.7.0/Under the Hood-20230807-164509.yaml Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Bump manifest schema version to v11, freeze manifest v10
time: 2023-08-07T16:45:09.712744-04:00
custom:
  Author: gshank
  Issue: "8333"

6 .changes/1.7.0/Under the Hood-20230809-094834.yaml Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: add tracking for plugin.get_nodes calls
time: 2023-08-09T09:48:34.819445-04:00
custom:
  Author: michelleark
  Issue: "8344"

7 .changes/1.7.0/Under the Hood-20230811-100902.yaml Normal file
@@ -0,0 +1,7 @@
kind: Under the Hood
body: 'add internal flag: --no-partial-parse-file-diff to inform whether to compute
  a file diff during partial parsing'
time: 2023-08-11T10:09:02.832241-04:00
custom:
  Author: michelleark
  Issue: "8363"

6 .changes/1.7.0/Under the Hood-20230815-170307.yaml Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Add return values to a number of functions for mypy
time: 2023-08-15T17:03:07.895252-04:00
custom:
  Author: gshank
  Issue: "8389"

6 .changes/1.7.0/Under the Hood-20230817-134548.yaml Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Fix mypy warnings for ManifestLoader.load()
time: 2023-08-17T13:45:48.937252-04:00
custom:
  Author: gshank
  Issue: "8401"

7 .changes/1.7.0/Under the Hood-20230821-134801.yaml Normal file
@@ -0,0 +1,7 @@
kind: Under the Hood
body: 'Re-organize jinja macros: relation-specific in /macros/adapters/relations/<relation>,
  relation agnostic in /macros/relations'
time: 2023-08-21T13:48:01.474731-04:00
custom:
  Author: mikealfare
  Issue: "8449"

6 .changes/1.7.0/Under the Hood-20230823-194237.yaml Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Update typing to meet mypy standards
time: 2023-08-23T19:42:37.130694-04:00
custom:
  Author: mikealfare
  Issue: "8396"

6 .changes/1.7.0/Under the Hood-20230830-140231.yaml Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Mypy errors - adapters/factory.py
time: 2023-08-30T14:02:31.519929-04:00
custom:
  Author: gshank
  Issue: "8387"
@@ -1,6 +0,0 @@
-kind: Docs
-body: Fix for column tests not rendering on quoted columns
-time: 2023-05-31T11:54:19.687363-04:00
-custom:
-  Author: drewbanin
-  Issue: "201"

6 .changes/unreleased/Features-20230830-212828.yaml Normal file
@@ -0,0 +1,6 @@
kind: Features
body: Accept a `dbt-cloud` config in dbt_project.yml
time: 2023-08-30T21:28:28.976746-05:00
custom:
  Author: emmyoop
  Issue: "8438"
6 .changes/unreleased/Fixes-20230803-093502.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: update agate.Number to handle ints
time: 2023-08-03T09:35:02.163968-05:00
custom:
  Author: dave-connors-3
  Issue: "8153"
58 .github/ISSUE_TEMPLATE/implementation-ticket.yml vendored Normal file
@@ -0,0 +1,58 @@
name: 🛠️ Implementation
description: This is an implementation ticket intended for use by the maintainers of dbt-core
title: "[<project>] <title>"
labels: ["user docs"]
body:
  - type: markdown
    attributes:
      value: This is an implementation ticket intended for use by the maintainers of dbt-core
  - type: checkboxes
    attributes:
      label: Housekeeping
      description: >
        A couple friendly reminders:
        1. Remove the `user docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
        2. Link any blocking issues in the "Blocked on" field under the "Core devs & maintainers" project.
      options:
        - label: I am a maintainer of dbt-core
          required: true
  - type: textarea
    attributes:
      label: Short description
      description: |
        Describe the scope of the ticket, a high-level implementation approach and any tradeoffs to consider
    validations:
      required: true
  - type: textarea
    attributes:
      label: Acceptance criteria
      description: |
        What is the definition of done for this ticket? Include any relevant edge cases and/or test cases
    validations:
      required: true
  - type: textarea
    attributes:
      label: Impact to Other Teams
      description: |
        Will this change impact other teams? Include details of the kinds of changes required (new tests, code changes, related tickets) and _add the relevant `Impact:[team]` label_.
      placeholder: |
        Example: This change impacts `dbt-redshift` because the tests will need to be modified. The `Impact:[Adapter]` label has been added.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Will backports be required?
      description: |
        Will this change need to be backported to previous versions? Add details, possible blockers to backporting and _add the relevant backport labels `backport 1.x.latest`_
      placeholder: |
        Example: Backport to 1.6.latest, 1.5.latest and 1.4.latest. Since 1.4 isn't using click, the backport may be complicated. The `backport 1.6.latest`, `backport 1.5.latest` and `backport 1.4.latest` labels have been added.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Context
      description: |
        Provide the "why", motivation, and alternative approaches considered -- linking to previous refinement issues, spikes, Notion docs as appropriate
    validations:
      required: false
8 .github/workflows/changelog-existence.yml vendored
@@ -2,10 +2,8 @@
 # Checks that a file has been committed under the /.changes directory
 # as a new CHANGELOG entry. Cannot check for a specific filename as
 # it is dynamically generated by change type and timestamp.
-# This workflow should not require any secrets since it runs for PRs
-# from forked repos.
-# By default, secrets are not passed to workflows running from
-# a forked repo.
+# This workflow runs on pull_request_target because it requires
+# secrets to post comments.
 
 # **why?**
 # Ensure code change gets reflected in the CHANGELOG.

@@ -19,7 +17,7 @@
 name: Check Changelog Entry
 
 on:
-  pull_request:
+  pull_request_target:
     types: [opened, reopened, labeled, unlabeled, synchronize]
   workflow_dispatch:
 
37 .github/workflows/docs-issue.yml vendored Normal file
@@ -0,0 +1,37 @@
# **what?**
# Open an issue in docs.getdbt.com when a PR is labeled `user docs`

# **why?**
# To reduce barriers for keeping docs up to date

# **when?**
# When a PR is labeled `user docs` and is merged. Runs on pull_request_target to run off the workflow already merged,
# not the workflow that existed on the PR branch. This allows old PRs to get comments.


name: Open issues in docs.getdbt.com repo when a PR is labeled
run-name: "Open an issue in docs.getdbt.com for PR #${{ github.event.pull_request.number }}"

on:
  pull_request_target:
    types: [labeled, closed]

defaults:
  run:
    shell: bash

permissions:
  issues: write # opens new issues
  pull-requests: write # comments on PRs


jobs:
  open_issues:
    if: contains( github.event.pull_request.labels.*.name, 'user docs') && github.event.pull_request.merged == true
    uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
    with:
      issue_repository: "dbt-labs/docs.getdbt.com"
      issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} PR #${{ github.event.pull_request.number }}"
      issue_labels: "content,improvement,dbt Core"
      issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated."
    secrets: inherit
84 .github/workflows/main.yml vendored
@@ -33,6 +33,11 @@ defaults:
   run:
     shell: bash
 
+# top-level adjustments can be made here
+env:
+  # number of parallel processes to spawn for python integration testing
+  PYTHON_INTEGRATION_TEST_WORKERS: 5
+
 jobs:
   code-quality:
     name: code-quality

@@ -103,26 +108,59 @@ jobs:
       - name: Upload Unit Test Coverage to Codecov
         if: ${{ matrix.python-version == '3.11' }}
         uses: codecov/codecov-action@v3
+        env:
+          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
+          token: ${{ secrets.CODECOV_TOKEN }}
           flags: unit
 
+  integration-metadata:
+    name: integration test metadata generation
+    runs-on: ubuntu-latest
+    outputs:
+      split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
+      include: ${{ steps.generate-include.outputs.include }}
+
+    steps:
+      - name: generate split-groups
+        id: generate-split-groups
+        run: |
+          MATRIX_JSON="["
+          for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+            MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
+          done
+          MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
+          MATRIX_JSON+="]"
+          echo "split-groups=${MATRIX_JSON}"
+          echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
+
+      - name: generate include
+        id: generate-include
+        run: |
+          INCLUDE=('"python-version":"3.8","os":"windows-latest"' '"python-version":"3.8","os":"macos-latest"' )
+          INCLUDE_GROUPS="["
+          for include in ${INCLUDE[@]}; do
+            for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+              INCLUDE_GROUPS+=$(sed 's/$/, /' <<< "{\"split-group\":\"${group}\",${include}}")
+            done
+          done
+          INCLUDE_GROUPS=$(echo $INCLUDE_GROUPS | sed 's/,*$//g')
+          INCLUDE_GROUPS+="]"
+          echo "include=${INCLUDE_GROUPS}"
+          echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT
+
   integration:
-    name: integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
+    name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
 
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 60
+    timeout-minutes: 30
+    needs:
+      - integration-metadata
     strategy:
       fail-fast: false
       matrix:
         python-version: ["3.8", "3.9", "3.10", "3.11"]
         os: [ubuntu-20.04]
-        include:
-          - python-version: 3.8
-            os: windows-latest
-          - python-version: 3.8
-            os: macos-latest
+        split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
+        include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
     env:
       TOXENV: integration
       DBT_INVOCATION_ENV: github-actions

@@ -165,6 +203,8 @@ jobs:
 
       - name: Run tests
         run: tox -- --ddtrace
+        env:
+          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
 
       - name: Get current date
         if: always()

@@ -182,8 +222,26 @@ jobs:
       - name: Upload Integration Test Coverage to Codecov
         if: ${{ matrix.python-version == '3.11' }}
         uses: codecov/codecov-action@v3
+        env:
+          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
+          token: ${{ secrets.CODECOV_TOKEN }}
           flags: integration
 
+  integration-report:
+    if: ${{ always() }}
+    name: Integration Test Suite
+    runs-on: ubuntu-latest
+    needs: integration
+    steps:
+      - name: "Integration Tests Failed"
+        if: ${{ contains(needs.integration.result, 'failure') || contains(needs.integration.result, 'cancelled') }}
+        # when this is true the next step won't execute
+        run: |
+          echo "::notice title='Integration test suite failed'"
+          exit 1
+
+      - name: "Integration Tests Passed"
+        run: |
+          echo "::notice title='Integration test suite passed'"
 
   build:
     name: build packages
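For reference, a minimal Python sketch (not part of the diff; the 5-worker value is taken from the env block above) of the matrix JSON the `generate split-groups` and `generate include` steps emit:

```python
# Reproduce the two bash loops above in Python for clarity.
workers = 5  # stands in for env.PYTHON_INTEGRATION_TEST_WORKERS

# "split-groups": a JSON array of worker indices as strings.
split_groups = "[" + ", ".join(f'"{g}"' for g in range(1, workers + 1)) + "]"
print(split_groups)  # ["1", "2", "3", "4", "5"]

# "include": every extra OS/python combo crossed with every split group,
# so the windows/macos jobs are also sharded across the workers.
extra = [
    '"python-version":"3.8","os":"windows-latest"',
    '"python-version":"3.8","os":"macos-latest"',
]
include = "[" + ", ".join(
    f'{{"split-group":"{g}",{combo}}}' for combo in extra for g in range(1, workers + 1)
) + "]"
print(include)  # 10 objects consumed by fromJson() in the integration matrix
```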
@@ -18,11 +18,41 @@ on:
 
 permissions: read-all
 
+# top-level adjustments can be made here
+env:
+  # number of parallel processes to spawn for python testing
+  PYTHON_INTEGRATION_TEST_WORKERS: 5
+
 jobs:
+  integration-metadata:
+    name: integration test metadata generation
+    runs-on: ubuntu-latest
+    outputs:
+      split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
+
+    steps:
+      - name: generate split-groups
+        id: generate-split-groups
+        run: |
+          MATRIX_JSON="["
+          for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+            MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
+          done
+          MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
+          MATRIX_JSON+="]"
+          echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
+
   # run the performance measurements on the current or default branch
   test-schema:
     name: Test Log Schema
     runs-on: ubuntu-20.04
     timeout-minutes: 30
+    needs:
+      - integration-metadata
     strategy:
       fail-fast: false
+      matrix:
+        split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
     env:
       # turns warnings into errors
       RUSTFLAGS: "-D warnings"

@@ -65,3 +95,14 @@ jobs:
       # we actually care if these pass, because the normal test run doesn't usually include many json log outputs
       - name: Run integration tests
         run: tox -e integration -- -nauto
+        env:
+          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
+
+  test-schema-report:
+    name: Log Schema Test Suite
+    runs-on: ubuntu-latest
+    needs: test-schema
+    steps:
+      - name: "[Notification] Log test suite passes"
+        run: |
+          echo "::notice title="Log test suite passes""
@@ -37,7 +37,7 @@ repos:
     alias: flake8-check
     stages: [manual]
 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v1.3.0
+  rev: v1.4.1
   hooks:
   - id: mypy
     # N.B.: Mypy is... a bit fragile.
102 CHANGELOG.md
@@ -5,6 +5,108 @@
 - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
 - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)
 
+## dbt-core 1.7.0-b2 - September 01, 2023
+
+### Features
+
+- Add node attributes related to compilation to run_results.json ([#7519](https://github.com/dbt-labs/dbt-core/issues/7519))
+- Support configuration of semantic models with the addition of enable/disable and group enablement. ([#7968](https://github.com/dbt-labs/dbt-core/issues/7968))
+
+### Fixes
+
+- Add support for swapping materialized views with tables/views and vice versa ([#8449](https://github.com/dbt-labs/dbt-core/issues/8449))
+- Turn breaking changes to contracted models into warnings for unversioned models ([#8384](https://github.com/dbt-labs/dbt-core/issues/8384), [#8282](https://github.com/dbt-labs/dbt-core/issues/8282))
+- Ensure parsing does not break when `window_groupings` is not specified for `non_additive_dimension` ([#8453](https://github.com/dbt-labs/dbt-core/issues/8453))
+- fix ambiguous reference error for tests and versions when model name is duplicated across packages ([#8327](https://github.com/dbt-labs/dbt-core/issues/8327), [#8493](https://github.com/dbt-labs/dbt-core/issues/8493))
+- Fix "Internal Error: Expected node <unique-id> not found in manifest" when depends_on set on ModelNodeArgs ([#8506](https://github.com/dbt-labs/dbt-core/issues/8506))
+- Fix snapshot success message ([#7583](https://github.com/dbt-labs/dbt-core/issues/7583))
+
+### Docs
+
+- Fix newline escapes and improve formatting in docker README ([dbt-docs/#8211](https://github.com/dbt-labs/dbt-docs/issues/8211))
+
+### Under the Hood
+
+- Switch from hologram to mashumaro jsonschema ([#8426](https://github.com/dbt-labs/dbt-core/issues/8426))
+- Re-organize jinja macros: relation-specific in /macros/adapters/relations/<relation>, relation agnostic in /macros/relations ([#8449](https://github.com/dbt-labs/dbt-core/issues/8449))
+- Update typing to meet mypy standards ([#8396](https://github.com/dbt-labs/dbt-core/issues/8396))
+- Mypy errors - adapters/factory.py ([#8387](https://github.com/dbt-labs/dbt-core/issues/8387))
+
+### Contributors
+- [@jamezrin](https://github.com/jamezrin) ([#8211](https://github.com/dbt-labs/dbt-core/issues/8211))
+
+
+## dbt-core 1.7.0-b1 - August 17, 2023
+
+### Breaking Changes
+
+- Removed the FirstRunResultError and AfterFirstRunResultError event types, using the existing RunResultError in their place. ([#7963](https://github.com/dbt-labs/dbt-core/issues/7963))
+
+### Features
+
+- Enable re-population of metadata vars post-environment change during programmatic invocation ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
+- Added support to configure a delimiter for a seed file, defaults to comma ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
+- Allow specification of `create_metric: true` on measures ([#8125](https://github.com/dbt-labs/dbt-core/issues/8125))
+
+### Fixes
+
+- Copy dir during `dbt deps` if symlink fails ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
+- Fixed double-underline ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
+- Copy target_schema from config into snapshot node ([#6745](https://github.com/dbt-labs/dbt-core/issues/6745))
+- Enable converting deprecation warnings to errors ([#8130](https://github.com/dbt-labs/dbt-core/issues/8130))
+- Add status to Parse Inline Error ([#8173](https://github.com/dbt-labs/dbt-core/issues/8173))
+- Ensure `warn_error_options` get serialized in `invocation_args_dict` ([#7694](https://github.com/dbt-labs/dbt-core/issues/7694))
+- Stop detecting materialization macros based on macro name ([#6231](https://github.com/dbt-labs/dbt-core/issues/6231))
+- Update `dbt deps` download retry logic to handle `EOFError` exceptions ([#6653](https://github.com/dbt-labs/dbt-core/issues/6653))
+- Improve handling of CTE injection with ephemeral models ([#8213](https://github.com/dbt-labs/dbt-core/issues/8213))
+- Fix unbound local variable error in `checked_agg_time_dimension_for_measure` ([#8230](https://github.com/dbt-labs/dbt-core/issues/8230))
+- Ensure runtime errors are raised for graph runnable tasks (compile, show, run, etc) ([#8166](https://github.com/dbt-labs/dbt-core/issues/8166))
+- Fix retry not working with log-file-max-bytes ([#8297](https://github.com/dbt-labs/dbt-core/issues/8297))
+- Detect changes to model access, version, or latest_version in state:modified ([#8189](https://github.com/dbt-labs/dbt-core/issues/8189))
+- Add connection status into list of statuses for dbt debug ([#8350](https://github.com/dbt-labs/dbt-core/issues/8350))
+- fix fqn-selection for external versioned models ([#8374](https://github.com/dbt-labs/dbt-core/issues/8374))
+- Fix: DbtInternalError after model that previously ref'd external model is deleted ([#8375](https://github.com/dbt-labs/dbt-core/issues/8375))
+- Fix using list command with path selector and project-dir ([#8385](https://github.com/dbt-labs/dbt-core/issues/8385))
+- Remedy performance regression by only writing run_results.json once. ([#8360](https://github.com/dbt-labs/dbt-core/issues/8360))
+
+### Docs
+
+- Corrected spelling of "Partiton" ([dbt-docs/#8100](https://github.com/dbt-labs/dbt-docs/issues/8100))
+- Remove static SQL codeblock for metrics ([dbt-docs/#436](https://github.com/dbt-labs/dbt-docs/issues/436))
+- fixed comment util.py ([dbt-docs/#None](https://github.com/dbt-labs/dbt-docs/issues/None))
+- Display contract and column constraints on the model page ([dbt-docs/#433](https://github.com/dbt-labs/dbt-docs/issues/433))
+- Display semantic model details in docs ([dbt-docs/#431](https://github.com/dbt-labs/dbt-docs/issues/431))
+
+### Under the Hood
+
+- Refactor flaky test pp_versioned_models ([#7781](https://github.com/dbt-labs/dbt-core/issues/7781))
+- format exception from dbtPlugin.initialize ([#8152](https://github.com/dbt-labs/dbt-core/issues/8152))
+- A way to control maxBytes for a single dbt.log file ([#8199](https://github.com/dbt-labs/dbt-core/issues/8199))
+- Ref expressions with version can now be processed by the latest version of the high-performance dbt-extractor library. ([#7688](https://github.com/dbt-labs/dbt-core/issues/7688))
+- Bump manifest schema version to v11, freeze manifest v10 ([#8333](https://github.com/dbt-labs/dbt-core/issues/8333))
+- add tracking for plugin.get_nodes calls ([#8344](https://github.com/dbt-labs/dbt-core/issues/8344))
+- add internal flag: --no-partial-parse-file-diff to inform whether to compute a file diff during partial parsing ([#8363](https://github.com/dbt-labs/dbt-core/issues/8363))
+- Add return values to a number of functions for mypy ([#8389](https://github.com/dbt-labs/dbt-core/issues/8389))
+- Fix mypy warnings for ManifestLoader.load() ([#8401](https://github.com/dbt-labs/dbt-core/issues/8401))
+- Use python version 3.10.7 in Docker image. ([#8444](https://github.com/dbt-labs/dbt-core/issues/8444))
+
+### Dependencies
+
+- Bump mypy from 1.3.0 to 1.4.0 ([#7912](https://github.com/dbt-labs/dbt-core/pull/7912))
+- Bump mypy from 1.4.0 to 1.4.1 ([#8219](https://github.com/dbt-labs/dbt-core/pull/8219))
+- Update pin for click<9 ([#8232](https://github.com/dbt-labs/dbt-core/pull/8232))
+- Add upper bound to sqlparse pin of <0.5 ([#8236](https://github.com/dbt-labs/dbt-core/pull/8236))
+- Support dbt-semantic-interfaces 0.2.0 ([#8250](https://github.com/dbt-labs/dbt-core/pull/8250))
+
+### Contributors
+- [@anjutiwari](https://github.com/anjutiwari) ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
+- [@d-kaneshiro](https://github.com/d-kaneshiro) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
+- [@gem7318](https://github.com/gem7318) ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
+- [@lllong33](https://github.com/lllong33) ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
+- [@marcodamore](https://github.com/marcodamore) ([#436](https://github.com/dbt-labs/dbt-core/issues/436))
+- [@pgoslatara](https://github.com/pgoslatara) ([#8100](https://github.com/dbt-labs/dbt-core/issues/8100))
+- [@ramonvermeulen](https://github.com/ramonvermeulen) ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
+
 ## Previous Releases
 
 For information on prior major and minor releases, see their changelogs:
@@ -0,0 +1,9 @@
ignore:
  - ".github"
  - ".changes"
coverage:
  status:
    project:
      default:
        target: auto
        threshold: 0.01% # Reduce noise by ignoring rounding errors in coverage drops
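For reference, a hedged Python sketch of what the `target: auto` plus `threshold` rule above means: codecov compares the head commit's coverage against the base commit's, and a drop no larger than the threshold still passes. The coverage values here are illustrative.

```python
def project_status_passes(base_pct: float, head_pct: float, threshold_pct: float = 0.01) -> bool:
    # target: auto -> the base commit's coverage is the target; a drop of at
    # most `threshold_pct` percentage points is tolerated as rounding noise.
    return head_pct >= base_pct - threshold_pct

print(project_status_passes(85.213, 85.209))  # True: inside the 0.01% slack
print(project_status_passes(85.213, 84.900))  # False: a real coverage drop
```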
@@ -400,7 +400,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
 
     @abc.abstractmethod
     def execute(
-        self, sql: str, auto_begin: bool = False, fetch: bool = False
+        self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
     ) -> Tuple[AdapterResponse, agate.Table]:
         """Execute the given SQL.
 
@@ -408,7 +408,28 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         :param bool auto_begin: If set, and dbt is not currently inside a
             transaction, automatically begin one.
         :param bool fetch: If set, fetch results.
+        :param int limit: If set, limits the result set
         :return: A tuple of the query status and results (empty if fetch=False).
         :rtype: Tuple[AdapterResponse, agate.Table]
         """
         raise dbt.exceptions.NotImplementedError("`execute` is not implemented for this adapter!")
 
+    def add_select_query(self, sql: str) -> Tuple[Connection, Any]:
+        """
+        This was added here because base.impl.BaseAdapter.get_column_schema_from_query expects it to be here.
+        That method wouldn't work unless the adapter used sql.impl.SQLAdapter, sql.connections.SQLConnectionManager
+        or defined this method on <Adapter>ConnectionManager before passing it in to <Adapter>Adapter.
+
+        See https://github.com/dbt-labs/dbt-core/issues/8396 for more information.
+        """
+        raise dbt.exceptions.NotImplementedError(
+            "`add_select_query` is not implemented for this adapter!"
+        )
+
+    @classmethod
+    def data_type_code_to_name(cls, type_code: Union[int, str]) -> str:
+        """Get the string representation of the data type from the type_code."""
+        # https://peps.python.org/pep-0249/#type-objects
+        raise dbt.exceptions.NotImplementedError(
+            "`data_type_code_to_name` is not implemented for this adapter!"
+        )
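For reference, a hedged sketch of the `limit` semantics the new `execute` parameter implies: when set, only that many rows are fetched. This uses sqlite3 stand-ins rather than dbt's adapter classes, purely for illustration.

```python
import sqlite3
from typing import List, Optional, Tuple

def execute(sql: str, fetch: bool = False, limit: Optional[int] = None) -> Tuple[str, List[tuple]]:
    # Run the query against a throwaway in-memory database.
    conn = sqlite3.connect(":memory:")
    cur = conn.execute(sql)
    rows: List[tuple] = []
    if fetch:
        # With a limit, fetch at most `limit` rows; otherwise fetch everything.
        rows = cur.fetchall() if limit is None else cur.fetchmany(limit)
    conn.close()
    return "OK", rows

status, rows = execute("SELECT 1 UNION SELECT 2 UNION SELECT 3 ORDER BY 1", fetch=True, limit=2)
print(status, rows)  # OK [(1,), (2,)]
```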
@@ -43,7 +43,7 @@ from dbt.exceptions import (
|
||||
UnexpectedNullError,
|
||||
)
|
||||
|
||||
from dbt.adapters.protocol import AdapterConfig, ConnectionManagerProtocol
|
||||
from dbt.adapters.protocol import AdapterConfig
|
||||
from dbt.clients.agate_helper import empty_table, merge_tables, table_from_rows
|
||||
from dbt.clients.jinja import MacroGenerator
|
||||
from dbt.contracts.graph.manifest import Manifest, MacroManifest
|
||||
@@ -60,7 +60,7 @@ from dbt.events.types import (
|
||||
)
|
||||
from dbt.utils import filter_null_values, executor, cast_to_str, AttrDict
|
||||
|
||||
from dbt.adapters.base.connections import Connection, AdapterResponse
|
||||
from dbt.adapters.base.connections import Connection, AdapterResponse, BaseConnectionManager
|
||||
from dbt.adapters.base.meta import AdapterMeta, available
|
||||
from dbt.adapters.base.relation import (
|
||||
ComponentName,
|
||||
@@ -208,7 +208,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
|
||||
Relation: Type[BaseRelation] = BaseRelation
|
||||
Column: Type[BaseColumn] = BaseColumn
|
||||
ConnectionManager: Type[ConnectionManagerProtocol]
|
||||
ConnectionManager: Type[BaseConnectionManager]
|
||||
|
||||
# A set of clobber config fields accepted by this adapter
|
||||
# for use in materializations
|
||||
@@ -315,14 +315,21 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
|
||||
@available.parse(lambda *a, **k: ("", empty_table()))
|
||||
def get_partitions_metadata(self, table: str) -> Tuple[agate.Table]:
|
||||
"""Obtain partitions metadata for a BigQuery partitioned table.
|
||||
"""
|
||||
TODO: Can we move this to dbt-bigquery?
|
||||
Obtain partitions metadata for a BigQuery partitioned table.
|
||||
|
||||
:param str table_id: a partitioned table id, in standard SQL format.
|
||||
:param str table: a partitioned table id, in standard SQL format.
|
||||
:return: a partition metadata tuple, as described in
|
||||
https://cloud.google.com/bigquery/docs/creating-partitioned-tables#getting_partition_metadata_using_meta_tables.
|
||||
:rtype: agate.Table
|
||||
"""
|
||||
return self.connections.get_partitions_metadata(table=table)
|
||||
if hasattr(self.connections, "get_partitions_metadata"):
|
||||
return self.connections.get_partitions_metadata(table=table)
|
||||
else:
|
||||
raise NotImplementedError(
|
||||
"`get_partitions_metadata` is not implemented for this adapter!"
|
||||
)
|
||||
|
||||
###
|
||||
# Methods that should never be overridden
|
||||
@@ -453,9 +460,10 @@ class BaseAdapter(metaclass=AdapterMeta):
         # it's possible that there were no relations in some schemas. We want
         # to insert the schemas we query into the cache's `.schemas` attribute
         # so we can check it later
-        cache_update: Set[Tuple[Optional[str], Optional[str]]] = set()
+        cache_update: Set[Tuple[Optional[str], str]] = set()
         for relation in cache_schemas:
-            cache_update.add((relation.database, relation.schema))
+            if relation.schema:
+                cache_update.add((relation.database, relation.schema))
         self.cache.update_schemas(cache_update)

     def set_relations_cache(
@@ -93,7 +93,7 @@ class AdapterMeta(abc.ABCMeta):
     _available_: FrozenSet[str]
     _parse_replacements_: Dict[str, Callable]

-    def __new__(mcls, name, bases, namespace, **kwargs):
+    def __new__(mcls, name, bases, namespace, **kwargs) -> "AdapterMeta":
         # mypy does not like the `**kwargs`. But `ABCMeta` itself takes
         # `**kwargs` in its argspec here (and passes them to `type.__new__`.
         # I'm not sure there is any benefit to it after poking around a bit,

@@ -25,9 +25,9 @@ class _QueryComment(local):
    - a source_name indicating what set the current thread's query comment
    """

-    def __init__(self, initial):
+    def __init__(self, initial) -> None:
        self.query_comment: Optional[str] = initial
-        self.append = False
+        self.append: bool = False

    def add(self, sql: str) -> str:
        if not self.query_comment:
@@ -1,6 +1,6 @@
 from collections.abc import Hashable
 from dataclasses import dataclass, field
-from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set
+from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set, List

 from dbt.contracts.graph.nodes import SourceDefinition, ManifestNode, ResultNode, ParsedNode
 from dbt.contracts.relation import (
@@ -35,6 +35,10 @@ class BaseRelation(FakeAPIObject, Hashable):
     include_policy: Policy = field(default_factory=lambda: Policy())
     quote_policy: Policy = field(default_factory=lambda: Policy())
     dbt_created: bool = False
+    # register relation types that can be renamed for the purpose of replacing relations using stages and backups
+    renameable_relations: List[str] = field(
+        default_factory=lambda: [RelationType.Table, RelationType.View]
+    )

     def _is_exactish_match(self, field: ComponentName, value: str) -> bool:
         if self.dbt_created and self.quote_policy.get_part(field) is False:
@@ -169,7 +173,6 @@ class BaseRelation(FakeAPIObject, Hashable):
         return self.include(identifier=False).replace_path(identifier=None)

     def _render_iterator(self) -> Iterator[Tuple[Optional[ComponentName], Optional[str]]]:
-
         for key in ComponentName:
             path_part: Optional[str] = None
             if self.include_policy.get_part(key):
@@ -286,6 +289,10 @@ class BaseRelation(FakeAPIObject, Hashable):
         )
         return cls.from_dict(kwargs)

+    @property
+    def can_be_renamed(self):
+        return self.type in self.renameable_relations
+
     def __repr__(self) -> str:
         return "<{} {}>".format(self.__class__.__name__, self.render())
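The new `renameable_relations` registry plus the `can_be_renamed` property let materializations decide whether a relation can be replaced via a stage/backup rename. A minimal stand-in, with relation types simplified to strings:

```python
from dataclasses import dataclass, field
from typing import List


@dataclass
class RelationSketch:
    type: str
    # tables and views are renameable by default, mirroring the diff
    renameable_relations: List[str] = field(
        default_factory=lambda: ["table", "view"]
    )

    @property
    def can_be_renamed(self) -> bool:
        return self.type in self.renameable_relations


print(RelationSketch("table").can_be_renamed)              # True
print(RelationSketch("materialized_view").can_be_renamed)  # False
```

An adapter that supports renaming more relation types would override the default factory rather than the property.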
@@ -19,7 +19,7 @@ Adapter = AdapterProtocol


 class AdapterContainer:
-    def __init__(self):
+    def __init__(self) -> None:
         self.lock = threading.Lock()
         self.adapters: Dict[str, Adapter] = {}
         self.plugins: Dict[str, AdapterPlugin] = {}

@@ -1,6 +1,6 @@
 import abc
 import time
-from typing import List, Optional, Tuple, Any, Iterable, Dict, Union
+from typing import List, Optional, Tuple, Any, Iterable, Dict

 import agate
@@ -131,14 +131,6 @@ class SQLConnectionManager(BaseConnectionManager):

         return dbt.clients.agate_helper.table_from_data_flat(data, column_names)

-    @classmethod
-    def data_type_code_to_name(cls, type_code: Union[int, str]) -> str:
-        """Get the string representation of the data type from the type_code."""
-        # https://peps.python.org/pep-0249/#type-objects
-        raise dbt.exceptions.NotImplementedError(
-            "`data_type_code_to_name` is not implemented for this adapter!"
-        )
-
     def execute(
         self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
     ) -> Tuple[AdapterResponse, agate.Table]:
@@ -61,7 +61,6 @@ def args_to_context(args: List[str]) -> Context:
     if len(args) == 1 and "," in args[0]:
         args = args[0].split(",")
     sub_command_name, sub_command, args = cli.resolve_command(cli_ctx, args)
-
     # Handle source and docs group.
     if isinstance(sub_command, Group):
         sub_command_name, sub_command, args = sub_command.resolve_command(cli_ctx, args)
@@ -319,7 +318,6 @@ def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandParams:

     for k, v in args_dict.items():
         k = k.lower()
-
         # if a "which" value exists in the args dict, it should match the command provided
         if k == WHICH_KEY:
             if v != command.value:
@@ -344,7 +342,8 @@ def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandParams:

         if k == "macro" and command == CliCommand.RUN_OPERATION:
             add_fn(v)
-        elif v in (None, False):
+        # None is a Singleton, False is a Flyweight, only one instance of each.
+        elif v is None or v is False:
             add_fn(f"--no-{spinal_cased}")
         elif v is True:
             add_fn(f"--{spinal_cased}")
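The switch from tuple membership to identity checks is subtle but meaningful: `in` compares with `==`, and in Python `0 == False`, so an integer option value of `0` would have been misread as a disabled boolean flag. A two-line demonstration:

```python
# The old check's bug, and why identity is exact:
print(0 in (None, False))       # True  - membership uses ==, and 0 == False
print(0 is None or 0 is False)  # False - the new check only matches the singletons
```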
@@ -132,6 +132,7 @@ class dbtRunner:
 @p.enable_legacy_logger
 @p.fail_fast
 @p.log_cache_events
+@p.log_file_max_bytes
 @p.log_format
 @p.log_format_file
 @p.log_level
@@ -140,6 +141,7 @@ class dbtRunner:
 @p.macro_debugging
 @p.partial_parse
 @p.partial_parse_file_path
+@p.partial_parse_file_diff
 @p.populate_cache
 @p.print
 @p.printer_width
@@ -171,6 +171,15 @@ use_colors_file = click.option(
     default=True,
 )

+log_file_max_bytes = click.option(
+    "--log-file-max-bytes",
+    envvar="DBT_LOG_FILE_MAX_BYTES",
+    help="Configure the max file size in bytes for a single dbt.log file, before rolling over. 0 means no limit.",
+    default=10 * 1024 * 1024,  # 10mb
+    type=click.INT,
+    hidden=True,
+)
+
 log_path = click.option(
     "--log-path",
     envvar="DBT_LOG_PATH",
@@ -248,6 +257,14 @@ partial_parse_file_path = click.option(
     type=click.Path(exists=True, dir_okay=False, resolve_path=True),
 )

+partial_parse_file_diff = click.option(
+    "--partial-parse-file-diff/--no-partial-parse-file-diff",
+    envvar="DBT_PARTIAL_PARSE_FILE_DIFF",
+    help="Internal flag for whether to compute a file diff during partial parsing.",
+    hidden=True,
+    default=True,
+)
+
 populate_cache = click.option(
     "--populate-cache/--no-populate-cache",
     envvar="DBT_POPULATE_CACHE",
@@ -380,9 +397,9 @@ inline = click.option(
 # Most CLI arguments should use the combined `select` option that aliases `--models` to `--select`.
 # However, if you need to split out these separators (like `dbt ls`), use the `models` and `raw_select` options instead.
 # See https://github.com/dbt-labs/dbt-core/pull/6774#issuecomment-1408476095 for more info.
-models = click.option(*model_decls, **select_attrs)
-raw_select = click.option(*select_decls, **select_attrs)
-select = click.option(*select_decls, *model_decls, **select_attrs)
+models = click.option(*model_decls, **select_attrs)  # type: ignore[arg-type]
+raw_select = click.option(*select_decls, **select_attrs)  # type: ignore[arg-type]
+select = click.option(*select_decls, *model_decls, **select_attrs)  # type: ignore[arg-type]

 selector = click.option(
     "--selector",
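dbt's file logger itself is not shown in this diff, but the semantics the new `--log-file-max-bytes` flag implies (roll over once `dbt.log` exceeds the byte cap, 10 MiB by default) are the same ones the standard library expresses like this; treat it as an analogy, not dbt's implementation:

```python
import logging
from logging.handlers import RotatingFileHandler

# Same rollover policy as the new flag's default: 10 * 1024 * 1024 bytes.
handler = RotatingFileHandler("dbt.log", maxBytes=10 * 1024 * 1024, backupCount=1)
logger = logging.getLogger("example")
logger.addHandler(handler)
logger.warning("rolls over once dbt.log exceeds 10 MiB")
```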
@@ -9,10 +9,20 @@ from typing import Iterable, List, Dict, Union, Optional, Any

 from dbt.exceptions import DbtRuntimeError


 BOM = BOM_UTF8.decode("utf-8")  # '\ufeff'


+class Integer(agate.data_types.DataType):
+    def cast(self, d):
+        if type(d) == int:
+            return d
+        else:
+            raise agate.exceptions.CastError('Can not parse value "%s" as Integer.' % d)
+
+    def jsonify(self, d):
+        return d
+
+
 class Number(agate.data_types.Number):
     # undo the change in https://github.com/wireservice/agate/pull/733
     # i.e. do not cast True and False to numeric 1 and 0
@@ -48,6 +58,7 @@ def build_type_tester(
 ) -> agate.TypeTester:

     types = [
+        Integer(null_values=("null", "")),
         Number(null_values=("null", "")),
         agate.data_types.Date(null_values=("null", ""), date_format="%Y-%m-%d"),
         agate.data_types.DateTime(null_values=("null", ""), datetime_format="%Y-%m-%d %H:%M:%S"),
@@ -135,12 +146,12 @@ def as_matrix(table):
     return [r.values() for r in table.rows.values()]


-def from_csv(abspath, text_columns):
+def from_csv(abspath, text_columns, delimiter=","):
     type_tester = build_type_tester(text_columns=text_columns)
     with open(abspath, encoding="utf-8") as fp:
         if fp.read(1) != BOM:
             fp.seek(0)
-        return agate.Table.from_csv(fp, column_types=type_tester)
+        return agate.Table.from_csv(fp, column_types=type_tester, delimiter=delimiter)


 class _NullMarker:
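The new `delimiter` parameter is threaded straight through to `agate.Table.from_csv`, which is what lets a seed use a separator other than a comma. A small illustrative run (file name invented for the example):

```python
import agate

# Write a semicolon-separated "seed" and parse it with the new parameter.
with open("example_seed.csv", "w", encoding="utf-8") as f:
    f.write("id;name\n1;alpha\n2;beta\n")

table = agate.Table.from_csv("example_seed.csv", delimiter=";")
print(len(table.rows))     # 2
print(table.column_names)  # ('id', 'name')
```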
@@ -191,7 +191,7 @@ NativeSandboxEnvironment.template_class = NativeSandboxTemplate  # type: ignore


 class TemplateCache:
-    def __init__(self):
+    def __init__(self) -> None:
         self.file_cache: Dict[str, jinja2.Template] = {}

     def get_node_template(self, node) -> jinja2.Template:
@@ -4,7 +4,6 @@ import json
 import networkx as nx  # type: ignore
 import os
 import pickle
-import sqlparse

 from collections import defaultdict
 from typing import List, Dict, Any, Tuple, Optional
@@ -36,6 +35,7 @@ from dbt.node_types import NodeType, ModelLanguage
 from dbt.events.format import pluralize
 import dbt.tracking
 import dbt.task.list as list_task
+import sqlparse

 graph_file_name = "graph.gpickle"

@@ -378,16 +378,16 @@ class Compiler:

             _add_prepended_cte(prepended_ctes, InjectedCTE(id=cte.id, sql=sql))

-        injected_sql = inject_ctes_into_sql(
-            model.compiled_code,
-            prepended_ctes,
-        )
-        model._pre_injected_sql = model.compiled_code
-        model.compiled_code = injected_sql
-        model.extra_ctes = prepended_ctes
-        model.extra_ctes_injected = True
+        # Check again before updating for multi-threading
+        if not model.extra_ctes_injected:
+            injected_sql = inject_ctes_into_sql(
+                model.compiled_code,
+                prepended_ctes,
+            )
+            model.extra_ctes_injected = True
+            model._pre_injected_sql = model.compiled_code
+            model.compiled_code = injected_sql
+            model.extra_ctes = prepended_ctes

         # if model.extra_ctes is not set to prepended ctes, something went wrong
         return model, model.extra_ctes
@@ -523,6 +523,12 @@ class Compiler:
         the node's raw_code into compiled_code, and then calls the
         recursive method to "prepend" the ctes.
         """
+        # Make sure Lexer for sqlparse 0.4.4 is initialized
+        from sqlparse.lexer import Lexer  # type: ignore
+
+        if hasattr(Lexer, "get_default_instance"):
+            Lexer.get_default_instance()
+
         node = self._compile_code(node, manifest, extra_context)

         node, _ = self._recursively_prepend_ctes(node, manifest, extra_context)
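The "check again before updating" change is a classic double-checked guard: several threads can reach this point holding the same node, and re-testing `extra_ctes_injected` right before mutating keeps the CTEs from being injected twice. A toy version of the idea (the diff relies on the re-check alone; the lock here only makes the toy deterministic):

```python
import threading


class Node:
    def __init__(self) -> None:
        self.compiled_code = "select 1"
        self.extra_ctes_injected = False


_lock = threading.Lock()


def inject(node: Node) -> None:
    with _lock:
        if not node.extra_ctes_injected:  # check again before updating
            node.extra_ctes_injected = True
            node.compiled_code = "with cte as (select 1) " + node.compiled_code


node = Node()
threads = [threading.Thread(target=inject, args=(node,)) for _ in range(8)]
for t in threads:
    t.start()
for t in threads:
    t.join()
print(node.compiled_code.count("with cte"))  # 1, never 2
```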
@@ -426,8 +426,10 @@ class PartialProject(RenderComponents):
         sources: Dict[str, Any]
         tests: Dict[str, Any]
         metrics: Dict[str, Any]
+        semantic_models: Dict[str, Any]
         exposures: Dict[str, Any]
         vars_value: VarProvider
+        dbt_cloud: Dict[str, Any]

         dispatch = cfg.dispatch
         models = cfg.models
@@ -436,6 +438,7 @@ class PartialProject(RenderComponents):
         sources = cfg.sources
         tests = cfg.tests
         metrics = cfg.metrics
+        semantic_models = cfg.semantic_models
         exposures = cfg.exposures
         if cfg.vars is None:
             vars_dict: Dict[str, Any] = {}
@@ -459,6 +462,8 @@ class PartialProject(RenderComponents):
             manifest_selectors = SelectorDict.parse_from_selectors_list(
                 rendered.selectors_dict["selectors"]
             )
+        dbt_cloud = cfg.dbt_cloud

         project = Project(
             project_name=name,
             version=version,
@@ -492,12 +497,14 @@ class PartialProject(RenderComponents):
             sources=sources,
             tests=tests,
             metrics=metrics,
+            semantic_models=semantic_models,
             exposures=exposures,
             vars=vars_value,
             config_version=cfg.config_version,
             unrendered=unrendered,
             project_env_vars=project_env_vars,
             restrict_access=cfg.restrict_access,
+            dbt_cloud=dbt_cloud,
         )
         # sanity check - this means an internal issue
         project.validate()
@@ -598,6 +605,7 @@ class Project:
     sources: Dict[str, Any]
     tests: Dict[str, Any]
     metrics: Dict[str, Any]
+    semantic_models: Dict[str, Any]
     exposures: Dict[str, Any]
     vars: VarProvider
     dbt_version: List[VersionSpecifier]
@@ -609,6 +617,7 @@ class Project:
     unrendered: RenderComponents
     project_env_vars: Dict[str, Any]
     restrict_access: bool
+    dbt_cloud: Dict[str, Any]

     @property
     def all_source_paths(self) -> List[str]:
@@ -673,11 +682,13 @@ class Project:
                 "sources": self.sources,
                 "tests": self.tests,
                 "metrics": self.metrics,
+                "semantic-models": self.semantic_models,
                 "exposures": self.exposures,
                 "vars": self.vars.to_dict(),
                 "require-dbt-version": [v.to_version_string() for v in self.dbt_version],
                 "config-version": self.config_version,
                 "restrict-access": self.restrict_access,
+                "dbt-cloud": self.dbt_cloud,
             }
         )
         if self.query_comment:
@@ -167,6 +167,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
             sources=project.sources,
             tests=project.tests,
             metrics=project.metrics,
+            semantic_models=project.semantic_models,
             exposures=project.exposures,
             vars=project.vars,
             config_version=project.config_version,
@@ -182,6 +183,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
             args=args,
             cli_vars=cli_vars,
             dependencies=dependencies,
+            dbt_cloud=project.dbt_cloud,
         )

     # Called by 'load_projects' in this class
@@ -322,6 +324,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
             "sources": self._get_config_paths(self.sources),
             "tests": self._get_config_paths(self.tests),
             "metrics": self._get_config_paths(self.metrics),
+            "semantic_models": self._get_config_paths(self.semantic_models),
             "exposures": self._get_config_paths(self.exposures),
         }
@@ -1,6 +1,8 @@
 from __future__ import annotations

 import json
 import os
-from typing import Any, Dict, NoReturn, Optional, Mapping, Iterable, Set, List
+from typing import Any, Callable, Dict, NoReturn, Optional, Mapping, Iterable, Set, List
 import threading

 from dbt.flags import get_flags
@@ -86,33 +88,29 @@ def get_context_modules() -> Dict[str, Dict[str, Any]]:


 class ContextMember:
-    def __init__(self, value, name=None):
+    def __init__(self, value: Any, name: Optional[str] = None) -> None:
         self.name = name
         self.inner = value

-    def key(self, default):
+    def key(self, default: str) -> str:
         if self.name is None:
             return default
         return self.name


-def contextmember(value):
-    if isinstance(value, str):
-        return lambda v: ContextMember(v, name=value)
-    return ContextMember(value)
+def contextmember(value: Optional[str] = None) -> Callable:
+    return lambda v: ContextMember(v, name=value)


-def contextproperty(value):
-    if isinstance(value, str):
-        return lambda v: ContextMember(property(v), name=value)
-    return ContextMember(property(value))
+def contextproperty(value: Optional[str] = None) -> Callable:
+    return lambda v: ContextMember(property(v), name=value)


 class ContextMeta(type):
-    def __new__(mcls, name, bases, dct):
-        context_members = {}
-        context_attrs = {}
-        new_dct = {}
+    def __new__(mcls, name, bases, dct: Dict[str, Any]) -> ContextMeta:
+        context_members: Dict[str, Any] = {}
+        context_attrs: Dict[str, Any] = {}
+        new_dct: Dict[str, Any] = {}

         for base in bases:
             context_members.update(getattr(base, "_context_members_", {}))
@@ -148,27 +146,28 @@ class Var:
         return self._cli_vars

     @property
-    def node_name(self):
+    def node_name(self) -> str:
         if self._node is not None:
             return self._node.name
         else:
             return "<Configuration>"

-    def get_missing_var(self, var_name):
-        raise RequiredVarNotFoundError(var_name, self._merged, self._node)
+    def get_missing_var(self, var_name: str) -> NoReturn:
+        # TODO function name implies a non exception resolution
+        raise RequiredVarNotFoundError(var_name, dict(self._merged), self._node)

-    def has_var(self, var_name: str):
+    def has_var(self, var_name: str) -> bool:
         return var_name in self._merged

-    def get_rendered_var(self, var_name):
+    def get_rendered_var(self, var_name: str) -> Any:
         raw = self._merged[var_name]
         # if bool/int/float/etc are passed in, don't compile anything
         if not isinstance(raw, str):
             return raw

-        return get_rendered(raw, self._context)
+        return get_rendered(raw, dict(self._context))

-    def __call__(self, var_name, default=_VAR_NOTSET):
+    def __call__(self, var_name: str, default: Any = _VAR_NOTSET) -> Any:
         if self.has_var(var_name):
             return self.get_rendered_var(var_name)
         elif default is not self._VAR_NOTSET:
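The decorator factories above explain the mechanical `@contextproperty` to `@contextproperty()` change that runs through the rest of this compare: the factory now always returns a decorator and takes only an optional override name, instead of also accepting the decorated function directly. A self-contained sketch of both call forms:

```python
from typing import Any, Callable, Optional


class ContextMember:
    def __init__(self, value: Any, name: Optional[str] = None) -> None:
        self.name = name
        self.inner = value


def contextmember(value: Optional[str] = None) -> Callable:
    # Always a factory now: no isinstance(value, str) special case.
    return lambda v: ContextMember(v, name=value)


@contextmember()  # bare form; the member keeps the function's own name
def thread_id() -> str:
    return "main"


@contextmember("env_var")  # named form, same code path as before
def _env_var() -> str:
    return "..."


print(isinstance(thread_id, ContextMember))  # True
print(_env_var.name)                         # env_var
```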
@@ -178,13 +177,17 @@ class Var:


 class BaseContext(metaclass=ContextMeta):
-    # subclass is TargetContext
-    def __init__(self, cli_vars):
-        self._ctx = {}
-        self.cli_vars = cli_vars
-        self.env_vars = {}
+    # Set by ContextMeta
+    _context_members_: Dict[str, Any]
+    _context_attrs_: Dict[str, Any]

-    def generate_builtins(self):
+    # subclass is TargetContext
+    def __init__(self, cli_vars: Dict[str, Any]) -> None:
+        self._ctx: Dict[str, Any] = {}
+        self.cli_vars: Dict[str, Any] = cli_vars
+        self.env_vars: Dict[str, Any] = {}
+
+    def generate_builtins(self) -> Dict[str, Any]:
         builtins: Dict[str, Any] = {}
         for key, value in self._context_members_.items():
             if hasattr(value, "__get__"):
@@ -194,14 +197,14 @@ class BaseContext(metaclass=ContextMeta):
         return builtins

     # no dbtClassMixin so this is not an actual override
-    def to_dict(self):
+    def to_dict(self) -> Dict[str, Any]:
         self._ctx["context"] = self._ctx
         builtins = self.generate_builtins()
         self._ctx["builtins"] = builtins
         self._ctx.update(builtins)
         return self._ctx

-    @contextproperty
+    @contextproperty()
     def dbt_version(self) -> str:
         """The `dbt_version` variable returns the installed version of dbt that
         is currently running. It can be used for debugging or auditing
@@ -221,7 +224,7 @@ class BaseContext(metaclass=ContextMeta):
         """
         return dbt_version

-    @contextproperty
+    @contextproperty()
     def var(self) -> Var:
         """Variables can be passed from your `dbt_project.yml` file into models
         during compilation. These variables are useful for configuring packages
@@ -290,7 +293,7 @@ class BaseContext(metaclass=ContextMeta):
         """
         return Var(self._ctx, self.cli_vars)

-    @contextmember
+    @contextmember()
     def env_var(self, var: str, default: Optional[str] = None) -> str:
         """The env_var() function. Return the environment variable named 'var'.
         If there is no such environment variable set, return the default.
@@ -318,7 +321,7 @@ class BaseContext(metaclass=ContextMeta):

     if os.environ.get("DBT_MACRO_DEBUGGING"):

-        @contextmember
+        @contextmember()
         @staticmethod
         def debug():
             """Enter a debugger at this line in the compiled jinja code."""
@@ -357,7 +360,7 @@ class BaseContext(metaclass=ContextMeta):
         """
         raise MacroReturn(data)

-    @contextmember
+    @contextmember()
     @staticmethod
     def fromjson(string: str, default: Any = None) -> Any:
         """The `fromjson` context method can be used to deserialize a json
@@ -378,7 +381,7 @@ class BaseContext(metaclass=ContextMeta):
         except ValueError:
             return default

-    @contextmember
+    @contextmember()
     @staticmethod
     def tojson(value: Any, default: Any = None, sort_keys: bool = False) -> Any:
         """The `tojson` context method can be used to serialize a Python
@@ -401,7 +404,7 @@ class BaseContext(metaclass=ContextMeta):
         except ValueError:
             return default

-    @contextmember
+    @contextmember()
     @staticmethod
     def fromyaml(value: str, default: Any = None) -> Any:
         """The fromyaml context method can be used to deserialize a yaml string
@@ -432,7 +435,7 @@ class BaseContext(metaclass=ContextMeta):

     # safe_dump defaults to sort_keys=True, but we act like json.dumps (the
     # opposite)
-    @contextmember
+    @contextmember()
     @staticmethod
     def toyaml(
         value: Any, default: Optional[str] = None, sort_keys: bool = False
@@ -477,7 +480,7 @@ class BaseContext(metaclass=ContextMeta):
         except TypeError:
             return default

-    @contextmember
+    @contextmember()
     @staticmethod
     def set_strict(value: Iterable[Any]) -> Set[Any]:
         """The `set_strict` context method can be used to convert any iterable
@@ -519,7 +522,7 @@ class BaseContext(metaclass=ContextMeta):
         except TypeError:
             return default

-    @contextmember
+    @contextmember()
     @staticmethod
     def zip_strict(*args: Iterable[Any]) -> Iterable[Any]:
         """The `zip_strict` context method can be used to used to return
@@ -541,7 +544,7 @@ class BaseContext(metaclass=ContextMeta):
         except TypeError as e:
             raise ZipStrictWrongTypeError(e)

-    @contextmember
+    @contextmember()
     @staticmethod
     def log(msg: str, info: bool = False) -> str:
         """Logs a line to either the log file or stdout.
@@ -562,7 +565,7 @@ class BaseContext(metaclass=ContextMeta):
         fire_event(JinjaLogDebug(msg=msg, node_info=get_node_info()))
         return ""

-    @contextproperty
+    @contextproperty()
     def run_started_at(self) -> Optional[datetime.datetime]:
         """`run_started_at` outputs the timestamp that this run started, e.g.
         `2017-04-21 01:23:45.678`. The `run_started_at` variable is a Python
@@ -590,19 +593,19 @@ class BaseContext(metaclass=ContextMeta):
         else:
             return None

-    @contextproperty
+    @contextproperty()
     def invocation_id(self) -> Optional[str]:
         """invocation_id outputs a UUID generated for this dbt run (useful for
         auditing)
         """
         return get_invocation_id()

-    @contextproperty
+    @contextproperty()
     def thread_id(self) -> str:
         """thread_id outputs an ID for the current thread (useful for auditing)"""
         return threading.current_thread().name

-    @contextproperty
+    @contextproperty()
     def modules(self) -> Dict[str, Any]:
         """The `modules` variable in the Jinja context contains useful Python
         modules for operating on data.
@@ -627,7 +630,7 @@ class BaseContext(metaclass=ContextMeta):
         """  # noqa
         return get_context_modules()

-    @contextproperty
+    @contextproperty()
     def flags(self) -> Any:
         """The `flags` variable contains true/false values for flags provided
         on the command line.
@@ -644,7 +647,7 @@ class BaseContext(metaclass=ContextMeta):
         """
         return flags_module.get_flag_obj()

-    @contextmember
+    @contextmember()
     @staticmethod
     def print(msg: str) -> str:
         """Prints a line to stdout.
@@ -662,7 +665,7 @@ class BaseContext(metaclass=ContextMeta):
         print(msg)
         return ""

-    @contextmember
+    @contextmember()
     @staticmethod
     def diff_of_two_dicts(
         dict_a: Dict[str, List[str]], dict_b: Dict[str, List[str]]
@@ -691,7 +694,7 @@ class BaseContext(metaclass=ContextMeta):
         dict_diff.update({k: dict_a[k]})
         return dict_diff

-    @contextmember
+    @contextmember()
     @staticmethod
     def local_md5(value: str) -> str:
         """Calculates an MD5 hash of the given string.
@@ -19,7 +19,7 @@ class ConfiguredContext(TargetContext):
         super().__init__(config.to_target_dict(), config.cli_vars)
         self.config = config

-    @contextproperty
+    @contextproperty()
     def project_name(self) -> str:
         return self.config.project_name
@@ -80,11 +80,11 @@ class SchemaYamlContext(ConfiguredContext):
         self._project_name = project_name
         self.schema_yaml_vars = schema_yaml_vars

-    @contextproperty
+    @contextproperty()
     def var(self) -> ConfiguredVar:
         return ConfiguredVar(self._ctx, self.config, self._project_name)

-    @contextmember
+    @contextmember()
     def env_var(self, var: str, default: Optional[str] = None) -> str:
         return_value = None
         if var.startswith(SECRET_ENV_PREFIX):
@@ -113,7 +113,7 @@ class MacroResolvingContext(ConfiguredContext):
     def __init__(self, config):
         super().__init__(config)

-    @contextproperty
+    @contextproperty()
     def var(self) -> ConfiguredVar:
         return ConfiguredVar(self._ctx, self.config, self.config.project_name)

@@ -45,6 +45,8 @@ class UnrenderedConfig(ConfigSource):
             model_configs = unrendered.get("tests")
         elif resource_type == NodeType.Metric:
             model_configs = unrendered.get("metrics")
+        elif resource_type == NodeType.SemanticModel:
+            model_configs = unrendered.get("semantic_models")
         elif resource_type == NodeType.Exposure:
             model_configs = unrendered.get("exposures")
         else:
@@ -70,6 +72,8 @@ class RenderedConfig(ConfigSource):
             model_configs = self.project.tests
         elif resource_type == NodeType.Metric:
             model_configs = self.project.metrics
+        elif resource_type == NodeType.SemanticModel:
+            model_configs = self.project.semantic_models
         elif resource_type == NodeType.Exposure:
             model_configs = self.project.exposures
         else:
@@ -189,9 +193,21 @@ class ContextConfigGenerator(BaseContextConfigGenerator[C]):

     def _update_from_config(self, result: C, partial: Dict[str, Any], validate: bool = False) -> C:
         translated = self._active_project.credentials.translate_aliases(partial)
-        return result.update_from(
+        translated = self.translate_hook_names(translated)
+        updated = result.update_from(
             translated, self._active_project.credentials.type, validate=validate
         )
+        return updated
+
+    def translate_hook_names(self, project_dict):
+        # This is a kind of kludge because the fix for #6411 specifically allowed misspelling
+        # the hook field names in dbt_project.yml, which only ever worked because we didn't
+        # run validate on the dbt_project configs.
+        if "pre_hook" in project_dict:
+            project_dict["pre-hook"] = project_dict.pop("pre_hook")
+        if "post_hook" in project_dict:
+            project_dict["post-hook"] = project_dict.pop("post_hook")
+        return project_dict

     def calculate_node_config_dict(
         self,
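The normalization kludge above is easy to exercise in isolation; this is the same function lifted out of its class, showing how misspelled hook keys from `dbt_project.yml` are rewritten to their hyphenated spellings before validation runs:

```python
def translate_hook_names(project_dict):
    if "pre_hook" in project_dict:
        project_dict["pre-hook"] = project_dict.pop("pre_hook")
    if "post_hook" in project_dict:
        project_dict["post-hook"] = project_dict.pop("post_hook")
    return project_dict


print(translate_hook_names({"pre_hook": ["select 1"], "materialized": "table"}))
# {'materialized': 'table', 'pre-hook': ['select 1']}
```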
@@ -24,7 +24,7 @@ class DocsRuntimeContext(SchemaYamlContext):
         self.node = node
         self.manifest = manifest

-    @contextmember
+    @contextmember()
     def doc(self, *args: str) -> str:
         """The `doc` function is used to reference docs blocks in schema.yml
         files. It is analogous to the `ref` function. For more information,

@@ -40,7 +40,7 @@ class MacroResolver:
         self._build_internal_packages_namespace()
         self._build_macros_by_name()

-    def _build_internal_packages_namespace(self):
+    def _build_internal_packages_namespace(self) -> None:
         # Iterate in reverse-order and overwrite: the packages that are first
         # in the list are the ones we want to "win".
         self.internal_packages_namespace: MacroNamespace = {}
@@ -56,7 +56,7 @@ class MacroResolver:
     # root package namespace
     # non-internal packages (that aren't local or root)
     # dbt internal packages
-    def _build_macros_by_name(self):
+    def _build_macros_by_name(self) -> None:
         macros_by_name = {}

         # all internal packages (already in the right order)
@@ -78,7 +78,7 @@ class MacroResolver:
         self,
         package_namespaces: Dict[str, MacroNamespace],
         macro: Macro,
-    ):
+    ) -> None:
         if macro.package_name in package_namespaces:
             namespace = package_namespaces[macro.package_name]
         else:
@@ -89,7 +89,7 @@ class MacroResolver:
             raise DuplicateMacroNameError(macro, macro, macro.package_name)
         package_namespaces[macro.package_name][macro.name] = macro

-    def add_macro(self, macro: Macro):
+    def add_macro(self, macro: Macro) -> None:
         macro_name: str = macro.name

         # internal macros (from plugins) will be processed separately from
@@ -103,11 +103,11 @@ class MacroResolver:
         if macro.package_name == self.root_project_name:
             self.root_package_macros[macro_name] = macro

-    def add_macros(self):
+    def add_macros(self) -> None:
         for macro in self.macros.values():
             self.add_macro(macro)

-    def get_macro(self, local_package, macro_name):
+    def get_macro(self, local_package, macro_name) -> Optional[Macro]:
         local_package_macros = {}
         # If the macro is explicitly prefixed with an internal namespace
         # (e.g. 'dbt.some_macro'), look there first
@@ -125,7 +125,7 @@ class MacroResolver:
         return self.macros_by_name[macro_name]
         return None

-    def get_macro_id(self, local_package, macro_name):
+    def get_macro_id(self, local_package, macro_name) -> Optional[str]:
         macro = self.get_macro(local_package, macro_name)
         if macro is None:
             return None

@@ -67,7 +67,7 @@ class ManifestContext(ConfiguredContext):
         dct.update(self.namespace)
         return dct

-    @contextproperty
+    @contextproperty()
     def context_macro_stack(self):
         return self.macro_stack

@@ -754,19 +754,19 @@ class ProviderContext(ManifestContext):
             self.model,
         )

-    @contextproperty
+    @contextproperty()
     def dbt_metadata_envs(self) -> Dict[str, str]:
         return get_metadata_vars()

-    @contextproperty
+    @contextproperty()
     def invocation_args_dict(self):
         return args_to_dict(self.config.args)

-    @contextproperty
+    @contextproperty()
     def _sql_results(self) -> Dict[str, Optional[AttrDict]]:
         return self.sql_results

-    @contextmember
+    @contextmember()
     def load_result(self, name: str) -> Optional[AttrDict]:
         if name in self.sql_results:
             # handle the special case of "main" macro
@@ -787,7 +787,7 @@ class ProviderContext(ManifestContext):
         # Handle trying to load a result that was never stored
         return None

-    @contextmember
+    @contextmember()
     def store_result(
         self, name: str, response: Any, agate_table: Optional[agate.Table] = None
     ) -> str:
@@ -803,7 +803,7 @@ class ProviderContext(ManifestContext):
         )
         return ""

-    @contextmember
+    @contextmember()
     def store_raw_result(
         self,
         name: str,
@@ -815,7 +815,7 @@ class ProviderContext(ManifestContext):
         response = AdapterResponse(_message=message, code=code, rows_affected=rows_affected)
         return self.store_result(name, response, agate_table)

-    @contextproperty
+    @contextproperty()
     def validation(self):
         def validate_any(*args) -> Callable[[T], None]:
             def inner(value: T) -> None:
@@ -836,7 +836,7 @@ class ProviderContext(ManifestContext):
             }
         )

-    @contextmember
+    @contextmember()
     def write(self, payload: str) -> str:
         # macros/source defs aren't 'writeable'.
         if isinstance(self.model, (Macro, SourceDefinition)):
@@ -845,11 +845,11 @@ class ProviderContext(ManifestContext):
         self.model.write_node(self.config.project_root, self.model.build_path, payload)
         return ""

-    @contextmember
+    @contextmember()
     def render(self, string: str) -> str:
         return get_rendered(string, self._ctx, self.model)

-    @contextmember
+    @contextmember()
     def try_or_compiler_error(
         self, message_if_exception: str, func: Callable, *args, **kwargs
     ) -> Any:
@@ -858,21 +858,22 @@ class ProviderContext(ManifestContext):
         except Exception:
             raise CompilationError(message_if_exception, self.model)

-    @contextmember
+    @contextmember()
     def load_agate_table(self) -> agate.Table:
         if not isinstance(self.model, SeedNode):
             raise LoadAgateTableNotSeedError(self.model.resource_type, node=self.model)
         assert self.model.root_path
         path = os.path.join(self.model.root_path, self.model.original_file_path)
         column_types = self.model.config.column_types
+        delimiter = self.model.config.delimiter
         try:
-            table = agate_helper.from_csv(path, text_columns=column_types)
+            table = agate_helper.from_csv(path, text_columns=column_types, delimiter=delimiter)
         except ValueError as e:
             raise LoadAgateTableValueError(e, node=self.model)
         table.original_abspath = os.path.abspath(path)
         return table

-    @contextproperty
+    @contextproperty()
     def ref(self) -> Callable:
         """The most important function in dbt is `ref()`; it's impossible to
         build even moderately complex models without it. `ref()` is how you
@@ -913,11 +914,11 @@ class ProviderContext(ManifestContext):
         """
         return self.provider.ref(self.db_wrapper, self.model, self.config, self.manifest)

-    @contextproperty
+    @contextproperty()
     def source(self) -> Callable:
         return self.provider.source(self.db_wrapper, self.model, self.config, self.manifest)

-    @contextproperty
+    @contextproperty()
     def metric(self) -> Callable:
         return self.provider.metric(self.db_wrapper, self.model, self.config, self.manifest)

@@ -978,7 +979,7 @@ class ProviderContext(ManifestContext):
         """  # noqa
         return self.provider.Config(self.model, self.context_config)

-    @contextproperty
+    @contextproperty()
     def execute(self) -> bool:
         """`execute` is a Jinja variable that returns True when dbt is in
         "execute" mode.
@@ -1039,7 +1040,7 @@ class ProviderContext(ManifestContext):
         """  # noqa
         return self.provider.execute

-    @contextproperty
+    @contextproperty()
     def exceptions(self) -> Dict[str, Any]:
         """The exceptions namespace can be used to raise warnings and errors in
         dbt userspace.
@@ -1077,15 +1078,15 @@ class ProviderContext(ManifestContext):
         """  # noqa
         return wrapped_exports(self.model)

-    @contextproperty
+    @contextproperty()
     def database(self) -> str:
         return self.config.credentials.database

-    @contextproperty
+    @contextproperty()
     def schema(self) -> str:
         return self.config.credentials.schema

-    @contextproperty
+    @contextproperty()
     def var(self) -> ModelConfiguredVar:
         return self.provider.Var(
             context=self._ctx,
@@ -1102,22 +1103,22 @@ class ProviderContext(ManifestContext):
         """
         return self.db_wrapper

-    @contextproperty
+    @contextproperty()
     def api(self) -> Dict[str, Any]:
         return {
             "Relation": self.db_wrapper.Relation,
             "Column": self.adapter.Column,
         }

-    @contextproperty
+    @contextproperty()
     def column(self) -> Type[Column]:
         return self.adapter.Column

-    @contextproperty
+    @contextproperty()
     def env(self) -> Dict[str, Any]:
         return self.target

-    @contextproperty
+    @contextproperty()
     def graph(self) -> Dict[str, Any]:
         """The `graph` context variable contains information about the nodes in
         your dbt project. Models, sources, tests, and snapshots are all
@@ -1233,23 +1234,23 @@ class ProviderContext(ManifestContext):
             ret["compiled_sql"] = ret["compiled_code"]
         return ret

-    @contextproperty
+    @contextproperty()
     def pre_hooks(self) -> Optional[List[Dict[str, Any]]]:
         return None

-    @contextproperty
+    @contextproperty()
     def post_hooks(self) -> Optional[List[Dict[str, Any]]]:
         return None

-    @contextproperty
+    @contextproperty()
     def sql(self) -> Optional[str]:
         return None

-    @contextproperty
+    @contextproperty()
     def sql_now(self) -> str:
         return self.adapter.date_function()

-    @contextmember
+    @contextmember()
     def adapter_macro(self, name: str, *args, **kwargs):
         """This was deprecated in v0.18 in favor of adapter.dispatch"""
         msg = (
@@ -1261,7 +1262,7 @@ class ProviderContext(ManifestContext):
         )
         raise CompilationError(msg)

-    @contextmember
+    @contextmember()
     def env_var(self, var: str, default: Optional[str] = None) -> str:
         """The env_var() function. Return the environment variable named 'var'.
         If there is no such environment variable set, return the default.
@@ -1305,7 +1306,7 @@ class ProviderContext(ManifestContext):
         else:
             raise EnvVarMissingError(var)

-    @contextproperty
+    @contextproperty()
     def selected_resources(self) -> List[str]:
         """The `selected_resources` variable contains a list of the resources
         selected based on the parameters provided to the dbt command.
@@ -1314,7 +1315,7 @@ class ProviderContext(ManifestContext):
         """
         return selected_resources.SELECTED_RESOURCES

-    @contextmember
+    @contextmember()
     def submit_python_job(self, parsed_model: Dict, compiled_code: str) -> AdapterResponse:
         # Check macro_stack and that the unique id is for a materialization macro
         if not (
@@ -1357,7 +1358,7 @@ class MacroContext(ProviderContext):
 class ModelContext(ProviderContext):
     model: ManifestNode

-    @contextproperty
+    @contextproperty()
     def pre_hooks(self) -> List[Dict[str, Any]]:
         if self.model.resource_type in [NodeType.Source, NodeType.Test]:
             return []
@@ -1366,7 +1367,7 @@ class ModelContext(ProviderContext):
             h.to_dict(omit_none=True) for h in self.model.config.pre_hook  # type: ignore[union-attr] # noqa
         ]

-    @contextproperty
+    @contextproperty()
     def post_hooks(self) -> List[Dict[str, Any]]:
         if self.model.resource_type in [NodeType.Source, NodeType.Test]:
             return []
@@ -1375,7 +1376,7 @@ class ModelContext(ProviderContext):
             h.to_dict(omit_none=True) for h in self.model.config.post_hook  # type: ignore[union-attr] # noqa
         ]

-    @contextproperty
+    @contextproperty()
     def sql(self) -> Optional[str]:
         # only doing this in sql model for backward compatible
         if self.model.language == ModelLanguage.sql:  # type: ignore[union-attr]
@@ -1392,7 +1393,7 @@ class ModelContext(ProviderContext):
         else:
             return None

-    @contextproperty
+    @contextproperty()
     def compiled_code(self) -> Optional[str]:
         if getattr(self.model, "defer_relation", None):
             # TODO https://github.com/dbt-labs/dbt-core/issues/7976
@@ -1403,15 +1404,15 @@ class ModelContext(ProviderContext):
         else:
             return None

-    @contextproperty
+    @contextproperty()
     def database(self) -> str:
         return getattr(self.model, "database", self.config.credentials.database)

-    @contextproperty
+    @contextproperty()
     def schema(self) -> str:
         return getattr(self.model, "schema", self.config.credentials.schema)

-    @contextproperty
+    @contextproperty()
     def this(self) -> Optional[RelationProxy]:
         """`this` makes available schema information about the currently
         executing model. It's is useful in any context in which you need to
@@ -1446,7 +1447,7 @@ class ModelContext(ProviderContext):
             return None
         return self.db_wrapper.Relation.create_from(self.config, self.model)

-    @contextproperty
+    @contextproperty()
     def defer_relation(self) -> Optional[RelationProxy]:
         """
         For commands which add information about this node's corresponding
@@ -1660,7 +1661,7 @@ class TestContext(ProviderContext):
         )
         self.namespace = macro_namespace

-    @contextmember
+    @contextmember()
     def env_var(self, var: str, default: Optional[str] = None) -> str:
         return_value = None
         if var.startswith(SECRET_ENV_PREFIX):

@@ -14,7 +14,7 @@ class SecretContext(BaseContext):
     """This context is used in profiles.yml + packages.yml. It can render secret
     env vars that aren't usable elsewhere"""

-    @contextmember
+    @contextmember()
     def env_var(self, var: str, default: Optional[str] = None) -> str:
         """The env_var() function. Return the environment variable named 'var'.
         If there is no such environment variable set, return the default.

@@ -9,7 +9,7 @@ class TargetContext(BaseContext):
         super().__init__(cli_vars=cli_vars)
         self.target_dict = target_dict

-    @contextproperty
+    @contextproperty()
     def target(self) -> Dict[str, Any]:
         """`target` contains information about your connection to the warehouse
         (specified in profiles.yml). Some configs are shared between all
@@ -16,26 +16,21 @@ from dbt.utils import translate_aliases, md5
 from dbt.events.functions import fire_event
 from dbt.events.types import NewConnectionOpening
 from dbt.events.contextvars import get_node_info
-from typing_extensions import Protocol
+from typing_extensions import Protocol, Annotated
 from dbt.dataclass_schema import (
     dbtClassMixin,
     StrEnum,
     ExtensibleDbtClassMixin,
-    HyphenatedDbtClassMixin,
     ValidatedStringMixin,
-    register_pattern,
 )
 from dbt.contracts.util import Replaceable
+from mashumaro.jsonschema.annotations import Pattern


 class Identifier(ValidatedStringMixin):
     ValidationRegex = r"^[A-Za-z_][A-Za-z0-9_]+$"


-# we need register_pattern for jsonschema validation
-register_pattern(Identifier, r"^[A-Za-z_][A-Za-z0-9_]+$")
-
-
 @dataclass
 class AdapterResponse(dbtClassMixin):
     _message: str
@@ -55,7 +50,8 @@ class ConnectionState(StrEnum):

 @dataclass(init=False)
 class Connection(ExtensibleDbtClassMixin, Replaceable):
-    type: Identifier
+    # Annotated is used by mashumaro for jsonschema generation
+    type: Annotated[Identifier, Pattern(r"^[A-Za-z_][A-Za-z0-9_]+$")]
     name: Optional[str] = None
     state: ConnectionState = ConnectionState.INIT
     transaction_open: bool = False
@@ -161,6 +157,7 @@ class Credentials(ExtensibleDbtClassMixin, Replaceable, metaclass=abc.ABCMeta):
     @classmethod
     def __pre_deserialize__(cls, data):
         data = super().__pre_deserialize__(data)
+        # Need to fixup dbname => database, pass => password
         data = cls.translate_aliases(data)
         return data

@@ -220,10 +217,10 @@ DEFAULT_QUERY_COMMENT = """


 @dataclass
-class QueryComment(HyphenatedDbtClassMixin):
+class QueryComment(dbtClassMixin):
     comment: str = DEFAULT_QUERY_COMMENT
     append: bool = False
-    job_label: bool = False
+    job_label: bool = field(default=False, metadata={"alias": "job-label"})


 class AdapterRequiredConfig(HasCredentials, Protocol):
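A sketch of the pattern replacing `register_pattern`: the regex rides along in an `Annotated` type and lands in the generated JSON schema. This uses mashumaro directly rather than dbt's mixins, and the class is illustrative, not the real `Connection` contract; the `.to_dict()` access on the built schema is my assumption about the mashumaro API, so treat it as a sketch:

```python
from dataclasses import dataclass
from typing_extensions import Annotated

from mashumaro.jsonschema import build_json_schema
from mashumaro.jsonschema.annotations import Pattern


@dataclass
class ConnectionSketch:
    # The constraint is attached to the annotation, not registered globally.
    type: Annotated[str, Pattern(r"^[A-Za-z_][A-Za-z0-9_]+$")]


schema = build_json_schema(ConnectionSketch).to_dict()
print(schema["properties"]["type"]["pattern"])  # ^[A-Za-z_][A-Za-z0-9_]+$
```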
@@ -225,6 +225,8 @@ class SchemaSourceFile(BaseSourceFile):
     sources: List[str] = field(default_factory=list)
     exposures: List[str] = field(default_factory=list)
     metrics: List[str] = field(default_factory=list)
+    # metrics generated from semantic_model measures
+    generated_metrics: List[str] = field(default_factory=list)
     groups: List[str] = field(default_factory=list)
     # node patches contain models, seeds, snapshots, analyses
     ndp: List[str] = field(default_factory=list)

@@ -331,18 +331,29 @@ class SemanticModelByMeasureLookup(dbtClassMixin):
         """Populate storage with all the measure + package paths to the Manifest's SemanticModels"""
         for semantic_model in manifest.semantic_models.values():
             self.add(semantic_model=semantic_model)
+        for disabled in manifest.disabled.values():
+            for node in disabled:
+                if isinstance(node, SemanticModel):
+                    self.add(semantic_model=node)

     def perform_lookup(self, unique_id: UniqueID, manifest: "Manifest") -> SemanticModel:
         """Tries to get a SemanticModel from the Manifest"""
-        semantic_model = manifest.semantic_models.get(unique_id)
-        if semantic_model is None:
+        enabled_semantic_model: Optional[SemanticModel] = manifest.semantic_models.get(unique_id)
+        disabled_semantic_model: Optional[List] = manifest.disabled.get(unique_id)
+
+        if isinstance(enabled_semantic_model, SemanticModel):
+            return enabled_semantic_model
+        elif disabled_semantic_model is not None and isinstance(
+            disabled_semantic_model[0], SemanticModel
+        ):
+            return disabled_semantic_model[0]
+        else:
             raise dbt.exceptions.DbtInternalError(
                 f"Semantic model `{unique_id}` found in cache but not found in manifest"
             )
-        return semantic_model


-# This handles both models/seeds/snapshots and sources/metrics/exposures
+# This handles both models/seeds/snapshots and sources/metrics/exposures/semantic_models
 class DisabledLookup(dbtClassMixin):
     def __init__(self, manifest: "Manifest"):
         self.storage: Dict[str, Dict[PackageName, List[Any]]] = {}
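The new lookup order is: prefer the enabled semantic model, fall back to the first disabled one (so callers can tell "disabled" apart from "missing"), and only then treat the cache entry as an internal error. A toy model of that decision, with plain dicts standing in for the manifest:

```python
from typing import Any, Dict, List


def perform_lookup(
    unique_id: str,
    enabled: Dict[str, Any],
    disabled: Dict[str, List[Any]],
) -> Any:
    if unique_id in enabled:
        return enabled[unique_id]
    if unique_id in disabled:
        return disabled[unique_id][0]  # first disabled entry wins
    raise RuntimeError(
        f"Semantic model `{unique_id}` found in cache but not found in manifest"
    )


print(perform_lookup("sm.a", {"sm.a": "enabled"}, {}))     # enabled
print(perform_lookup("sm.b", {}, {"sm.b": ["disabled"]}))  # disabled
```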
@@ -927,6 +938,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
         groupable_nodes = list(
             chain(
                 self.nodes.values(),
+                self.semantic_models.values(),
                 self.metrics.values(),
             )
         )
@@ -1056,8 +1068,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):

         return resolved_refs

-    # Called by dbt.parser.manifest._process_refs_for_exposure, _process_refs_for_metric,
-    # and dbt.parser.manifest._process_refs_for_node
+    # Called by dbt.parser.manifest._process_refs & ManifestLoader.check_for_model_deprecations
     def resolve_ref(
         self,
         source_node: GraphMemberNode,
@@ -1156,6 +1167,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
             semantic_model = self.semantic_model_by_measure_lookup.find(
                 target_measure_name, pkg, self
             )
+            # need to return it even if it's disabled so know it's not fully missing
             if semantic_model is not None:
                 return semantic_model

@@ -1331,10 +1343,13 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
         self.exposures[exposure.unique_id] = exposure
         source_file.exposures.append(exposure.unique_id)

-    def add_metric(self, source_file: SchemaSourceFile, metric: Metric):
+    def add_metric(self, source_file: SchemaSourceFile, metric: Metric, generated: bool = False):
         _check_duplicates(metric, self.metrics)
         self.metrics[metric.unique_id] = metric
-        source_file.metrics.append(metric.unique_id)
+        if not generated:
+            source_file.metrics.append(metric.unique_id)
+        else:
+            source_file.generated_metrics.append(metric.unique_id)

     def add_group(self, source_file: SchemaSourceFile, group: Group):
         _check_duplicates(group, self.groups)
@@ -1356,6 +1371,8 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
             source_file.add_test(node.unique_id, test_from)
         if isinstance(node, Metric):
             source_file.metrics.append(node.unique_id)
+        if isinstance(node, SemanticModel):
+            source_file.semantic_models.append(node.unique_id)
         if isinstance(node, Exposure):
             source_file.exposures.append(node.unique_id)
         else:
@@ -1422,7 +1439,7 @@ AnyManifest = Union[Manifest, MacroManifest]


 @dataclass
-@schema_version("manifest", 10)
+@schema_version("manifest", 11)
 class WritableManifest(ArtifactMixin):
     nodes: Mapping[UniqueID, ManifestNode] = field(
         metadata=dict(description=("The nodes defined in the dbt project and its dependencies"))
@@ -1486,6 +1503,7 @@ class WritableManifest(ArtifactMixin):
             ("manifest", 7),
             ("manifest", 8),
             ("manifest", 9),
+            ("manifest", 10),
         ]

     @classmethod
@@ -1493,7 +1511,7 @@ class WritableManifest(ArtifactMixin):
         """This overrides the "upgrade_schema_version" call in VersionedSchema (via
         ArtifactMixin) to modify the dictionary passed in from earlier versions of the manifest."""
         manifest_schema_version = get_manifest_schema_version(data)
-        if manifest_schema_version <= 9:
+        if manifest_schema_version <= 10:
             data = upgrade_manifest_json(data, manifest_schema_version)
         return cls.from_dict(data)
@@ -2,11 +2,11 @@ from dataclasses import field, Field, dataclass
 from enum import Enum
 from itertools import chain
 from typing import Any, List, Optional, Dict, Union, Type, TypeVar, Callable
+from typing_extensions import Annotated

 from dbt.dataclass_schema import (
     dbtClassMixin,
     ValidationError,
-    register_pattern,
     StrEnum,
 )
 from dbt.contracts.graph.unparsed import AdditionalPropertiesAllowed, Docs
@@ -15,6 +15,7 @@ from dbt.contracts.util import Replaceable, list_str
 from dbt.exceptions import DbtInternalError, CompilationError
 from dbt import hooks
 from dbt.node_types import NodeType
+from mashumaro.jsonschema.annotations import Pattern


 M = TypeVar("M", bound="Metadata")
@@ -188,9 +189,6 @@ class Severity(str):
     pass


-register_pattern(Severity, insensitive_patterns("warn", "error"))
-
-
 class OnConfigurationChangeOption(StrEnum):
     Apply = "apply"
     Continue = "continue"
@@ -376,25 +374,23 @@ class BaseConfig(AdditionalPropertiesAllowed, Replaceable):
         self.validate(dct)
         return self.from_dict(dct)

-    def replace(self, **kwargs):
-        dct = self.to_dict(omit_none=True)
-
-        mapping = self.field_mapping()
-        for key, value in kwargs.items():
-            new_key = mapping.get(key, key)
-            dct[new_key] = value
-        return self.from_dict(dct)
-

 @dataclass
 class SemanticModelConfig(BaseConfig):
     enabled: bool = True
+    group: Optional[str] = field(
+        default=None,
+        metadata=CompareBehavior.Exclude.meta(),
+    )


 @dataclass
 class MetricConfig(BaseConfig):
     enabled: bool = True
-    group: Optional[str] = None
+    group: Optional[str] = field(
+        default=None,
+        metadata=CompareBehavior.Exclude.meta(),
+    )


 @dataclass
@@ -447,11 +443,11 @@ class NodeConfig(NodeAndTestConfig):
     persist_docs: Dict[str, Any] = field(default_factory=dict)
     post_hook: List[Hook] = field(
         default_factory=list,
-        metadata=MergeBehavior.Append.meta(),
+        metadata={"merge": MergeBehavior.Append, "alias": "post-hook"},
     )
     pre_hook: List[Hook] = field(
         default_factory=list,
-        metadata=MergeBehavior.Append.meta(),
+        metadata={"merge": MergeBehavior.Append, "alias": "pre-hook"},
     )
     quoting: Dict[str, Any] = field(
         default_factory=dict,
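The `"alias"` entries in the field metadata are what make the manual pre/post-hook key shuffling in the next hunk removable: the serialization layer now reads the hyphenated keys itself. A sketch using mashumaro directly (dbt's own mixins wrap mashumaro, so this is the mechanism, not dbt's exact classes):

```python
from dataclasses import dataclass, field

from mashumaro import DataClassDictMixin


@dataclass
class HookedConfig(DataClassDictMixin):
    # The hyphenated spelling from dbt_project.yml maps onto the snake_case field.
    pre_hook: list = field(default_factory=list, metadata={"alias": "pre-hook"})


cfg = HookedConfig.from_dict({"pre-hook": ["select 1"]})
print(cfg.pre_hook)  # ['select 1']
```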
@@ -511,30 +507,11 @@ class NodeConfig(NodeAndTestConfig):
     @classmethod
     def __pre_deserialize__(cls, data):
         data = super().__pre_deserialize__(data)
-        field_map = {"post-hook": "post_hook", "pre-hook": "pre_hook"}
-        # create a new dict because otherwise it gets overwritten in
-        # tests
-        new_dict = {}
-        for key in data:
-            new_dict[key] = data[key]
-        data = new_dict
         for key in hooks.ModelHookType:
             if key in data:
                 data[key] = [hooks.get_hook_dict(h) for h in data[key]]
-        for field_name in field_map:
-            if field_name in data:
-                new_name = field_map[field_name]
-                data[new_name] = data.pop(field_name)
         return data

-    def __post_serialize__(self, dct):
-        dct = super().__post_serialize__(dct)
-        field_map = {"post_hook": "post-hook", "pre_hook": "pre-hook"}
-        for field_name in field_map:
-            if field_name in dct:
-                dct[field_map[field_name]] = dct.pop(field_name)
-        return dct
-
     # this is still used by jsonschema validation
     @classmethod
     def field_mapping(cls):
@@ -544,6 +521,7 @@ class NodeConfig(NodeAndTestConfig):
 @dataclass
 class SeedConfig(NodeConfig):
     materialized: str = "seed"
+    delimiter: str = ","
     quote_columns: Optional[bool] = None

     @classmethod
@@ -553,6 +531,9 @@ class SeedConfig(NodeConfig):
         raise ValidationError("A seed must have a materialized value of 'seed'")


+SEVERITY_PATTERN = r"^([Ww][Aa][Rr][Nn]|[Ee][Rr][Rr][Oo][Rr])$"
+
+
 @dataclass
 class TestConfig(NodeAndTestConfig):
     __test__ = False
@@ -563,7 +544,8 @@ class TestConfig(NodeAndTestConfig):
         metadata=CompareBehavior.Exclude.meta(),
     )
     materialized: str = "test"
-    severity: Severity = Severity("ERROR")
+    # Annotated is used by mashumaro for jsonschema generation
+    severity: Annotated[Severity, Pattern(SEVERITY_PATTERN)] = Severity("ERROR")
     store_failures: Optional[bool] = None
     where: Optional[str] = None
     limit: Optional[int] = None
@@ -619,6 +601,8 @@ class SnapshotConfig(EmptySnapshotConfig):
    @classmethod
    def validate(cls, data):
        super().validate(data)
        # Note: currently you can't just set these keys in schema.yml because this validation
        # will fail when parsing the snapshot node.
        if not data.get("strategy") or not data.get("unique_key") or not data.get("target_schema"):
            raise ValidationError(
                "Snapshots must be configured with a 'strategy', 'unique_key', "
@@ -649,6 +633,7 @@ class SnapshotConfig(EmptySnapshotConfig):
        if data.get("materialized") and data.get("materialized") != "snapshot":
            raise ValidationError("A snapshot must have a materialized value of 'snapshot'")

    # Called by "calculate_node_config_dict" in ContextConfigGenerator
    def finalize_and_validate(self):
        data = self.to_dict(omit_none=True)
        self.validate(data)
@@ -657,6 +642,7 @@ class SnapshotConfig(EmptySnapshotConfig):
RESOURCE_TYPES: Dict[NodeType, Type[BaseConfig]] = {
    NodeType.Metric: MetricConfig,
    NodeType.SemanticModel: SemanticModelConfig,
    NodeType.Exposure: ExposureConfig,
    NodeType.Source: SourceConfig,
    NodeType.Seed: SeedConfig,
@@ -29,3 +29,11 @@ class ModelNodeArgs:
            unique_id = f"{unique_id}.v{self.version}"

        return unique_id

    @property
    def fqn(self) -> List[str]:
        fqn = [self.package_name, self.name]
        if self.version:
            fqn.append(f"v{self.version}")

        return fqn
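The new `fqn` property appends a `v{version}` segment only for versioned models, matching how `unique_id` is suffixed above. Worked on a toy stand-in for `ModelNodeArgs`:

from dataclasses import dataclass
from typing import List, Optional

@dataclass
class Args:  # illustrative stand-in for ModelNodeArgs
    package_name: str
    name: str
    version: Optional[str] = None

    @property
    def fqn(self) -> List[str]:
        fqn = [self.package_name, self.name]
        if self.version:
            fqn.append(f"v{self.version}")
        return fqn

print(Args("jaffle_shop", "dim_customers").fqn)       # ['jaffle_shop', 'dim_customers']
print(Args("jaffle_shop", "dim_customers", "2").fqn)  # ['jaffle_shop', 'dim_customers', 'v2']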
@@ -6,7 +6,7 @@ from enum import Enum
import hashlib

from mashumaro.types import SerializableType
from typing import Optional, Union, List, Dict, Any, Sequence, Tuple, Iterator
from typing import Optional, Union, List, Dict, Any, Sequence, Tuple, Iterator, Literal

from dbt.dataclass_schema import dbtClassMixin, ExtensibleDbtClassMixin
@@ -44,12 +44,14 @@ from dbt.events.types import (
    SeedExceedsLimitSamePath,
    SeedExceedsLimitAndPathChanged,
    SeedExceedsLimitChecksumChanged,
    UnversionedBreakingChange,
)
from dbt.events.contextvars import set_log_contextvars
from dbt.flags import get_flags
from dbt.node_types import ModelLanguage, NodeType, AccessType
from dbt_semantic_interfaces.call_parameter_sets import FilterCallParameterSets
from dbt_semantic_interfaces.references import (
    EntityReference,
    MeasureReference,
    LinkableElementReference,
    SemanticModelReference,
@@ -554,18 +556,18 @@ class CompiledNode(ParsedNode):

@dataclass
class AnalysisNode(CompiledNode):
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Analysis]})
    resource_type: Literal[NodeType.Analysis]


@dataclass
class HookNode(CompiledNode):
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Operation]})
    resource_type: Literal[NodeType.Operation]
    index: Optional[int] = None


@dataclass
class ModelNode(CompiledNode):
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Model]})
    resource_type: Literal[NodeType.Model]
    access: AccessType = AccessType.Protected
    constraints: List[ModelLevelConstraint] = field(default_factory=list)
    version: Optional[NodeVersion] = None
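Replacing the `restrict` field metadata with `Literal[NodeType.X]` moves the restriction into the type itself, where mypy and schema generators can see it. A minimal sketch of the difference:

from dataclasses import dataclass
from enum import Enum
from typing import Literal

class NodeType(str, Enum):
    Model = "model"
    Analysis = "analysis"

@dataclass
class AnalysisNode:
    # exactly one value is allowed; no runtime "restrict" metadata needed
    resource_type: Literal[NodeType.Analysis]

node = AnalysisNode(resource_type=NodeType.Analysis)  # fine
# AnalysisNode(resource_type=NodeType.Model)  # rejected by mypy / schema validation
print(node.resource_type)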
@@ -589,7 +591,7 @@ class ModelNode(CompiledNode):
            name=args.name,
            package_name=args.package_name,
            unique_id=unique_id,
            fqn=[args.package_name, args.name],
            fqn=args.fqn,
            version=args.version,
            latest_version=args.latest_version,
            relation_name=args.relation_name,
@@ -625,6 +627,18 @@ class ModelNode(CompiledNode):
    def materialization_enforces_constraints(self) -> bool:
        return self.config.materialized in ["table", "incremental"]

    def same_contents(self, old, adapter_type) -> bool:
        return super().same_contents(old, adapter_type) and self.same_ref_representation(old)

    def same_ref_representation(self, old) -> bool:
        return (
            # Changing the latest_version may break downstream unpinned refs
            self.latest_version == old.latest_version
            # Changes to access or deprecation_date may lead to ref-related parsing errors
            and self.access == old.access
            and self.deprecation_date == old.deprecation_date
        )

    def build_contract_checksum(self):
        # We don't need to construct the checksum if the model does not
        # have contract enforced, because it won't be used.
@@ -669,11 +683,11 @@ class ModelNode(CompiledNode):
        # These are the categories of breaking changes:
        contract_enforced_disabled: bool = False
        columns_removed: List[str] = []
        column_type_changes: List[Tuple[str, str, str]] = []
        enforced_column_constraint_removed: List[Tuple[str, str]] = []  # column, constraint_type
        enforced_model_constraint_removed: List[
            Tuple[str, List[str]]
        ] = []  # constraint_type, columns
        column_type_changes: List[Dict[str, str]] = []
        enforced_column_constraint_removed: List[
            Dict[str, str]
        ] = []  # column_name, constraint_type
        enforced_model_constraint_removed: List[Dict[str, Any]] = []  # constraint_type, columns
        materialization_changed: List[str] = []

        if old.contract.enforced is True and self.contract.enforced is False:
@@ -695,11 +709,11 @@ class ModelNode(CompiledNode):
            # Has this column's data type changed?
            elif old_value.data_type != self.columns[old_key].data_type:
                column_type_changes.append(
                    (
                        str(old_value.name),
                        str(old_value.data_type),
                        str(self.columns[old_key].data_type),
                    )
                    {
                        "column_name": str(old_value.name),
                        "previous_column_type": str(old_value.data_type),
                        "current_column_type": str(self.columns[old_key].data_type),
                    }
                )

            # track if there are any column level constraints for the materialization check later
@@ -720,7 +734,11 @@ class ModelNode(CompiledNode):
                        and constraint_support[old_constraint.type] == ConstraintSupport.ENFORCED
                    ):
                        enforced_column_constraint_removed.append(
                            (old_key, str(old_constraint.type))
                            {
                                "column_name": old_key,
                                "constraint_name": old_constraint.name,
                                "constraint_type": ConstraintType(old_constraint.type),
                            }
                        )

        # Now compare the model level constraints
@@ -731,7 +749,11 @@ class ModelNode(CompiledNode):
                and constraint_support[old_constraint.type] == ConstraintSupport.ENFORCED
            ):
                enforced_model_constraint_removed.append(
                    (str(old_constraint.type), old_constraint.columns)
                    {
                        "constraint_name": old_constraint.name,
                        "constraint_type": ConstraintType(old_constraint.type),
                        "columns": old_constraint.columns,
                    }
                )

        # Check for relevant materialization changes.
@@ -745,7 +767,8 @@ class ModelNode(CompiledNode):
        # If a column has been added, it will be missing in the old.columns, and present in self.columns
        # That's a change (caught by the different checksums), but not a breaking change

        # Did we find any changes that we consider breaking? If so, that's an error
        # Did we find any changes that we consider breaking? If there's an enforced contract, that's
        # a warning unless the model is versioned, then it's an error.
        if (
            contract_enforced_disabled
            or columns_removed
@@ -754,32 +777,89 @@ class ModelNode(CompiledNode):
            or enforced_column_constraint_removed
            or materialization_changed
        ):
            raise (
                ContractBreakingChangeError(
                    contract_enforced_disabled=contract_enforced_disabled,
                    columns_removed=columns_removed,
                    column_type_changes=column_type_changes,
                    enforced_column_constraint_removed=enforced_column_constraint_removed,
                    enforced_model_constraint_removed=enforced_model_constraint_removed,
                    materialization_changed=materialization_changed,

            breaking_changes = []
            if contract_enforced_disabled:
                breaking_changes.append(
                    "Contract enforcement was removed: Previously, this model had an enforced contract. It is no longer configured to enforce its contract, and this is a breaking change."
                )
            if columns_removed:
                columns_removed_str = "\n - ".join(columns_removed)
                breaking_changes.append(f"Columns were removed: \n - {columns_removed_str}")
            if column_type_changes:
                column_type_changes_str = "\n - ".join(
                    [
                        f"{c['column_name']} ({c['previous_column_type']} -> {c['current_column_type']})"
                        for c in column_type_changes
                    ]
                )
                breaking_changes.append(
                    f"Columns with data_type changes: \n - {column_type_changes_str}"
                )
            if enforced_column_constraint_removed:
                column_constraint_changes_str = "\n - ".join(
                    [
                        f"'{c['constraint_name'] if c['constraint_name'] is not None else c['constraint_type']}' constraint on column {c['column_name']}"
                        for c in enforced_column_constraint_removed
                    ]
                )
                breaking_changes.append(
                    f"Enforced column level constraints were removed: \n - {column_constraint_changes_str}"
                )
            if enforced_model_constraint_removed:
                model_constraint_changes_str = "\n - ".join(
                    [
                        f"'{c['constraint_name'] if c['constraint_name'] is not None else c['constraint_type']}' constraint on columns {c['columns']}"
                        for c in enforced_model_constraint_removed
                    ]
                )
                breaking_changes.append(
                    f"Enforced model level constraints were removed: \n - {model_constraint_changes_str}"
                )
            if materialization_changed:
                materialization_changes_str = (
                    f"{materialization_changed[0]} -> {materialization_changed[1]}"
                )

                breaking_changes.append(
                    f"Materialization changed with enforced constraints: \n - {materialization_changes_str}"
                )

            if self.version is None:
                warn_or_error(
                    UnversionedBreakingChange(
                        contract_enforced_disabled=contract_enforced_disabled,
                        columns_removed=columns_removed,
                        column_type_changes=column_type_changes,
                        enforced_column_constraint_removed=enforced_column_constraint_removed,
                        enforced_model_constraint_removed=enforced_model_constraint_removed,
                        breaking_changes=breaking_changes,
                        model_name=self.name,
                        model_file_path=self.original_file_path,
                    ),
                    node=self,
                )
            )
            else:
                raise (
                    ContractBreakingChangeError(
                        breaking_changes=breaking_changes,
                        node=self,
                    )
                )

        # Otherwise, though we didn't find any *breaking* changes, the contract has still changed -- same_contract: False
        else:
            return False
        # Otherwise, the contract has changed -- same_contract: False
        return False

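The control flow above implements the new policy: human-readable `breaking_changes` strings are collected first, then unversioned models get an `UnversionedBreakingChange` warning while versioned models still raise `ContractBreakingChangeError`. Condensed to its skeleton (illustrative names, not dbt's event/exception machinery):

def handle_contract_changes(breaking_changes, version):
    # sketch of the warn-vs-raise decision above
    if not breaking_changes:
        return
    if version is None:
        print("warning: unversioned model has breaking contract changes:")
        for change in breaking_changes:
            print(" -", change)
    else:
        raise RuntimeError(f"breaking contract changes on versioned model: {breaking_changes}")

handle_contract_changes(["Columns were removed: \n - id"], version=None)  # warns only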
# TODO: rm?
@dataclass
class RPCNode(CompiledNode):
    resource_type: NodeType = field(metadata={"restrict": [NodeType.RPCCall]})
    resource_type: Literal[NodeType.RPCCall]


@dataclass
class SqlNode(CompiledNode):
    resource_type: NodeType = field(metadata={"restrict": [NodeType.SqlOperation]})
    resource_type: Literal[NodeType.SqlOperation]


# ====================================
@@ -789,7 +869,7 @@ class SqlNode(CompiledNode):
@dataclass
class SeedNode(ParsedNode):  # No SQLDefaults!
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Seed]})
    resource_type: Literal[NodeType.Seed]
    config: SeedConfig = field(default_factory=SeedConfig)
    # seeds need the root_path because the contents are not loaded initially
    # and we need the root_path to load the seed later
@@ -915,7 +995,7 @@ class TestShouldStoreFailures:
@dataclass
class SingularTestNode(TestShouldStoreFailures, CompiledNode):
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Test]})
    resource_type: Literal[NodeType.Test]
    # Was not able to make mypy happy and keep the code working. We need to
    # refactor the various configs.
    config: TestConfig = field(default_factory=TestConfig)  # type: ignore
@@ -951,7 +1031,7 @@ class HasTestMetadata(dbtClassMixin):
@dataclass
class GenericTestNode(TestShouldStoreFailures, CompiledNode, HasTestMetadata):
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Test]})
    resource_type: Literal[NodeType.Test]
    column_name: Optional[str] = None
    file_key_name: Optional[str] = None
    # Was not able to make mypy happy and keep the code working. We need to
@@ -984,13 +1064,13 @@ class IntermediateSnapshotNode(CompiledNode):
    # uses a regular node config, which the snapshot parser will then convert
    # into a full ParsedSnapshotNode after rendering. Note: it currently does
    # not work to set snapshot config in schema files because of the validation.
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Snapshot]})
    resource_type: Literal[NodeType.Snapshot]
    config: EmptySnapshotConfig = field(default_factory=EmptySnapshotConfig)


@dataclass
class SnapshotNode(CompiledNode):
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Snapshot]})
    resource_type: Literal[NodeType.Snapshot]
    config: SnapshotConfig
    defer_relation: Optional[DeferRelation] = None
@@ -1003,7 +1083,7 @@ class SnapshotNode(CompiledNode):
@dataclass
class Macro(BaseNode):
    macro_sql: str
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Macro]})
    resource_type: Literal[NodeType.Macro]
    depends_on: MacroDependsOn = field(default_factory=MacroDependsOn)
    description: str = ""
    meta: Dict[str, Any] = field(default_factory=dict)
@@ -1033,7 +1113,7 @@ class Macro(BaseNode):
@dataclass
class Documentation(BaseNode):
    block_contents: str
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Documentation]})
    resource_type: Literal[NodeType.Documentation]

    @property
    def search_name(self):
@@ -1064,7 +1144,7 @@ class UnpatchedSourceDefinition(BaseNode):
    source: UnparsedSourceDefinition
    table: UnparsedSourceTableDefinition
    fqn: List[str]
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Source]})
    resource_type: Literal[NodeType.Source]
    patch_path: Optional[str] = None

    def get_full_source_name(self):
@@ -1109,7 +1189,7 @@ class ParsedSourceMandatory(GraphNode, HasRelationMetadata):
    source_description: str
    loader: str
    identifier: str
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Source]})
    resource_type: Literal[NodeType.Source]


@dataclass
@@ -1236,7 +1316,7 @@ class SourceDefinition(NodeInfoMixin, ParsedSourceMandatory):
class Exposure(GraphNode):
    type: ExposureType
    owner: Owner
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Exposure]})
    resource_type: Literal[NodeType.Exposure]
    description: str = ""
    label: Optional[str] = None
    maturity: Optional[MaturityType] = None
@@ -1385,7 +1465,7 @@ class Metric(GraphNode):
    type_params: MetricTypeParams
    filter: Optional[WhereFilter] = None
    metadata: Optional[SourceFileMetadata] = None
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Metric]})
    resource_type: Literal[NodeType.Metric]
    meta: Dict[str, Any] = field(default_factory=dict)
    tags: List[str] = field(default_factory=list)
    config: MetricConfig = field(default_factory=MetricConfig)
@@ -1468,7 +1548,7 @@ class Metric(GraphNode):
class Group(BaseNode):
    name: str
    owner: Owner
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Group]})
    resource_type: Literal[NodeType.Group]


# ====================================
@@ -1498,6 +1578,9 @@ class SemanticModel(GraphNode):
    refs: List[RefArgs] = field(default_factory=list)
    created_at: float = field(default_factory=lambda: time.time())
    config: SemanticModelConfig = field(default_factory=SemanticModelConfig)
    unrendered_config: Dict[str, Any] = field(default_factory=dict)
    primary_entity: Optional[str] = None
    group: Optional[str] = None

    @property
    def entity_references(self) -> List[LinkableElementReference]:
@@ -1568,17 +1651,26 @@ class SemanticModel(GraphNode):
            measure is not None
        ), f"No measure with name ({measure_reference.element_name}) in semantic_model with name ({self.name})"

        if self.defaults is not None:
            default_agg_time_dimesion = self.defaults.agg_time_dimension
        default_agg_time_dimension = (
            self.defaults.agg_time_dimension if self.defaults is not None else None
        )

        agg_time_dimension_name = measure.agg_time_dimension or default_agg_time_dimesion
        agg_time_dimension_name = measure.agg_time_dimension or default_agg_time_dimension
        assert agg_time_dimension_name is not None, (
            f"Aggregation time dimension for measure {measure.name} is not set! This should either be set directly on "
            f"the measure specification in the model, or else defaulted to the primary time dimension in the data "
            f"source containing the measure."
            f"Aggregation time dimension for measure {measure.name} on semantic model {self.name} is not set! "
            "To fix this either specify a default `agg_time_dimension` for the semantic model or define an "
            "`agg_time_dimension` on the measure directly."
        )
        return TimeDimensionReference(element_name=agg_time_dimension_name)

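The rewritten lookup (which also fixes the `dimesion` typo) reads as: the measure's own `agg_time_dimension` wins, otherwise the semantic model default applies, and only if both are missing does the assertion fire. In isolation:

from typing import Optional

def resolve_agg_time_dimension(
    measure_agg: Optional[str], default_agg: Optional[str]
) -> str:
    # measure-level setting takes precedence over the model-level default
    name = measure_agg or default_agg
    assert name is not None, "set agg_time_dimension on the measure or a model default"
    return name

print(resolve_agg_time_dimension(None, "ds"))          # 'ds'
print(resolve_agg_time_dimension("created_at", "ds"))  # 'created_at'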
    @property
    def primary_entity_reference(self) -> Optional[EntityReference]:
        return (
            EntityReference(element_name=self.primary_entity)
            if self.primary_entity is not None
            else None
        )


# ====================================
# Patches
@@ -23,7 +23,7 @@ from dbt.dataclass_schema import dbtClassMixin, StrEnum, ExtensibleDbtClassMixin
from dataclasses import dataclass, field
from datetime import timedelta
from pathlib import Path
from typing import Optional, List, Union, Dict, Any, Sequence
from typing import Optional, List, Union, Dict, Any, Sequence, Literal


@dataclass
@@ -49,31 +49,18 @@ class HasCode(dbtClassMixin):

@dataclass
class UnparsedMacro(UnparsedBaseNode, HasCode):
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Macro]})
    resource_type: Literal[NodeType.Macro]


@dataclass
class UnparsedGenericTest(UnparsedBaseNode, HasCode):
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Macro]})
    resource_type: Literal[NodeType.Macro]


@dataclass
class UnparsedNode(UnparsedBaseNode, HasCode):
    name: str
    resource_type: NodeType = field(
        metadata={
            "restrict": [
                NodeType.Model,
                NodeType.Analysis,
                NodeType.Test,
                NodeType.Snapshot,
                NodeType.Operation,
                NodeType.Seed,
                NodeType.RPCCall,
                NodeType.SqlOperation,
            ]
        }
    )
    resource_type: NodeType

    @property
    def search_name(self):
@@ -82,7 +69,7 @@ class UnparsedNode(UnparsedBaseNode, HasCode):
@dataclass
class UnparsedRunHook(UnparsedNode):
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Operation]})
    resource_type: Literal[NodeType.Operation]
    index: Optional[int] = None
@@ -220,7 +207,7 @@ class UnparsedModelUpdate(UnparsedNodeUpdate):
    versions: Sequence[UnparsedVersion] = field(default_factory=list)
    deprecation_date: Optional[datetime.datetime] = None

    def __post_init__(self):
    def __post_init__(self) -> None:
        if self.latest_version:
            version_values = [version.v for version in self.versions]
            if self.latest_version not in version_values:
@@ -228,7 +215,7 @@ class UnparsedModelUpdate(UnparsedNodeUpdate):
                    f"latest_version: {self.latest_version} is not one of model '{self.name}' versions: {version_values} "
                )

        seen_versions: set[str] = set()
        seen_versions = set()
        for version in self.versions:
            if str(version.v) in seen_versions:
                raise ParsingError(
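Dropping the `set[str]` annotation presumably keeps mypy happy on the older Python targets dbt still supported; behavior is unchanged. Note that versions are deduplicated by their string form, so `2` and `"2"` collide:

seen_versions = set()
for v in [1, "2", 2]:
    if str(v) in seen_versions:
        print(f"duplicate version: {v!r}")  # fires for 2, since "2" was already seen
        continue
    seen_versions.add(str(v))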
@@ -689,7 +676,7 @@ class UnparsedEntity(dbtClassMixin):
class UnparsedNonAdditiveDimension(dbtClassMixin):
    name: str
    window_choice: str  # AggregationType enum
    window_groupings: List[str]
    window_groupings: List[str] = field(default_factory=list)

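Giving `window_groupings` a `default_factory` makes it optional in YAML, and follows the standard dataclass rule that mutable defaults must come from a factory so instances don't share one list:

from dataclasses import dataclass, field
from typing import List

@dataclass
class Dim:
    name: str
    window_groupings: List[str] = field(default_factory=list)

a, b = Dim("a"), Dim("b")
a.window_groupings.append("user_id")
print(b.window_groupings)  # [] -- each instance gets its own list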
@dataclass
@@ -701,6 +688,7 @@ class UnparsedMeasure(dbtClassMixin):
    agg_params: Optional[MeasureAggregationParameters] = None
    non_additive_dimension: Optional[UnparsedNonAdditiveDimension] = None
    agg_time_dimension: Optional[str] = None
    create_metric: bool = False


@dataclass
@@ -723,11 +711,13 @@ class UnparsedDimension(dbtClassMixin):
class UnparsedSemanticModel(dbtClassMixin):
    name: str
    model: str  # looks like "ref(...)"
    config: Dict[str, Any] = field(default_factory=dict)
    description: Optional[str] = None
    defaults: Optional[Defaults] = None
    entities: List[UnparsedEntity] = field(default_factory=list)
    measures: List[UnparsedMeasure] = field(default_factory=list)
    dimensions: List[UnparsedDimension] = field(default_factory=list)
    primary_entity: Optional[str] = None


def normalize_date(d: Optional[datetime.date]) -> Optional[datetime.datetime]:
Some files were not shown because too many files have changed in this diff.