Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-18 23:41:28 +00:00
Compare commits: jerco/upda...fix_8031 (84 commits)

97725ef127, aaafbe72be, 41c65126df, fbd2fc9205, 424f3d218a, 661623f9f7,
49397b4d7b, feaec5c916, 126c54f1ec, 756e257de3, 0553fd817c, 7ad971f720,
f485c13035, c30b691164, d088d4493e, 770f804325, 37a29073de, 17cd145f09,
baca60f293, ac539fd5cf, 048553ddc3, dfe6b71fd9, 18ee93ca3a, cb4bc2d6e9,
33a1483e04, 6a3e8ae8d5, 8a6824539d, 8e7726d806, c652e8c5e1, b0451806ef,
9b671809ac, 70749e728d, b514e4c249, 8350dfead3, 34e6edbb13, 27be92903e,
9388030182, b7aee3f5a4, 8e801b56a1, 3d2dd84b93, 83ff38ab24, 6603a44151,
e69d4e7f14, 506f65e880, 41bb52762b, 8c98ef3e70, 44d1e73b4f, 53794fbaba,
556b4043e9, 424c636533, f63709260e, 991618dfc1, 1af489b1cd, a433c31d6e,
5814928e38, 6130a6e1d0, 7872f6a670, f230e418aa, 518eb73f88, 5b6d21d7da,
410506f448, 3cb44d37c0, f977ed7471, 3f5617b569, fe9c875d32, 23b16ad6d2,
fdeccfaf24, fecde23da5, b1d931337e, 39542336b8, 799588cada, f392add4b8,
49560bf2a2, 44b3ed5ae9, 6235145641, ff5cb7ba51, 1e2b9ae962, 8cab58d248,
0d645c227f, fb6c349677, eeb057085c, 121371f4a4, a32713198b, a1b067c683
.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.7.0a1
+current_version = 1.7.0b1
 parse = (?P<major>[\d]+) # major version number
     \.(?P<minor>[\d]+) # minor version number
     \.(?P<patch>[\d]+) # patch version number
.changes/1.7.0-b1.md (new file, 70 lines)
@@ -0,0 +1,70 @@
## dbt-core 1.7.0-b1 - August 17, 2023

### Breaking Changes

- Removed the FirstRunResultError and AfterFirstRunResultError event types, using the existing RunResultError in their place. ([#7963](https://github.com/dbt-labs/dbt-core/issues/7963))

### Features

- Enable re-population of metadata vars post-environment change during programmatic invocation ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
- Added support to configure a delimiter for a seed file, defaults to comma ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
- Allow specification of `create_metric: true` on measures ([#8125](https://github.com/dbt-labs/dbt-core/issues/8125))

### Fixes

- Copy dir during `dbt deps` if symlink fails ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
- Fixed double-underline ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
- Copy target_schema from config into snapshot node ([#6745](https://github.com/dbt-labs/dbt-core/issues/6745))
- Enable converting deprecation warnings to errors ([#8130](https://github.com/dbt-labs/dbt-core/issues/8130))
- Add status to Parse Inline Error ([#8173](https://github.com/dbt-labs/dbt-core/issues/8173))
- Ensure `warn_error_options` get serialized in `invocation_args_dict` ([#7694](https://github.com/dbt-labs/dbt-core/issues/7694))
- Stop detecting materialization macros based on macro name ([#6231](https://github.com/dbt-labs/dbt-core/issues/6231))
- Update `dbt deps` download retry logic to handle `EOFError` exceptions ([#6653](https://github.com/dbt-labs/dbt-core/issues/6653))
- Improve handling of CTE injection with ephemeral models ([#8213](https://github.com/dbt-labs/dbt-core/issues/8213))
- Fix unbound local variable error in `checked_agg_time_dimension_for_measure` ([#8230](https://github.com/dbt-labs/dbt-core/issues/8230))
- Ensure runtime errors are raised for graph runnable tasks (compile, show, run, etc) ([#8166](https://github.com/dbt-labs/dbt-core/issues/8166))
- Fix retry not working with log-file-max-bytes ([#8297](https://github.com/dbt-labs/dbt-core/issues/8297))
- Detect changes to model access, version, or latest_version in state:modified ([#8189](https://github.com/dbt-labs/dbt-core/issues/8189))
- Add connection status into list of statuses for dbt debug ([#8350](https://github.com/dbt-labs/dbt-core/issues/8350))
- fix fqn-selection for external versioned models ([#8374](https://github.com/dbt-labs/dbt-core/issues/8374))
- Fix: DbtInternalError after model that previously ref'd external model is deleted ([#8375](https://github.com/dbt-labs/dbt-core/issues/8375))
- Fix using list command with path selector and project-dir ([#8385](https://github.com/dbt-labs/dbt-core/issues/8385))
- Remedy performance regression by only writing run_results.json once. ([#8360](https://github.com/dbt-labs/dbt-core/issues/8360))

### Docs

- Corrected spelling of "Partiton" ([dbt-docs/#8100](https://github.com/dbt-labs/dbt-docs/issues/8100))
- Remove static SQL codeblock for metrics ([dbt-docs/#436](https://github.com/dbt-labs/dbt-docs/issues/436))
- fixed comment util.py ([dbt-docs/#None](https://github.com/dbt-labs/dbt-docs/issues/None))
- Display contract and column constraints on the model page ([dbt-docs/#433](https://github.com/dbt-labs/dbt-docs/issues/433))
- Display semantic model details in docs ([dbt-docs/#431](https://github.com/dbt-labs/dbt-docs/issues/431))

### Under the Hood

- Refactor flaky test pp_versioned_models ([#7781](https://github.com/dbt-labs/dbt-core/issues/7781))
- format exception from dbtPlugin.initialize ([#8152](https://github.com/dbt-labs/dbt-core/issues/8152))
- A way to control maxBytes for a single dbt.log file ([#8199](https://github.com/dbt-labs/dbt-core/issues/8199))
- Ref expressions with version can now be processed by the latest version of the high-performance dbt-extractor library. ([#7688](https://github.com/dbt-labs/dbt-core/issues/7688))
- Bump manifest schema version to v11, freeze manifest v10 ([#8333](https://github.com/dbt-labs/dbt-core/issues/8333))
- add tracking for plugin.get_nodes calls ([#8344](https://github.com/dbt-labs/dbt-core/issues/8344))
- add internal flag: --no-partial-parse-file-diff to inform whether to compute a file diff during partial parsing ([#8363](https://github.com/dbt-labs/dbt-core/issues/8363))
- Add return values to a number of functions for mypy ([#8389](https://github.com/dbt-labs/dbt-core/issues/8389))
- Fix mypy warnings for ManifestLoader.load() ([#8401](https://github.com/dbt-labs/dbt-core/issues/8401))
- Use python version 3.10.7 in Docker image. ([#8444](https://github.com/dbt-labs/dbt-core/issues/8444))

### Dependencies

- Bump mypy from 1.3.0 to 1.4.0 ([#7912](https://github.com/dbt-labs/dbt-core/pull/7912))
- Bump mypy from 1.4.0 to 1.4.1 ([#8219](https://github.com/dbt-labs/dbt-core/pull/8219))
- Update pin for click<9 ([#8232](https://github.com/dbt-labs/dbt-core/pull/8232))
- Add upper bound to sqlparse pin of <0.5 ([#8236](https://github.com/dbt-labs/dbt-core/pull/8236))
- Support dbt-semantic-interfaces 0.2.0 ([#8250](https://github.com/dbt-labs/dbt-core/pull/8250))

### Contributors
- [@anjutiwari](https://github.com/anjutiwari) ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
- [@d-kaneshiro](https://github.com/d-kaneshiro) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
- [@gem7318](https://github.com/gem7318) ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
- [@lllong33](https://github.com/lllong33) ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
- [@marcodamore](https://github.com/marcodamore) ([#436](https://github.com/dbt-labs/dbt-core/issues/436))
- [@pgoslatara](https://github.com/pgoslatara) ([#8100](https://github.com/dbt-labs/dbt-core/issues/8100))
- [@ramonvermeulen](https://github.com/ramonvermeulen) ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
.changes/1.7.0/Breaking Changes-20230725-171359.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Breaking Changes
body: Removed the FirstRunResultError and AfterFirstRunResultError event types, using
  the existing RunResultError in their place.
time: 2023-07-25T17:13:59.441682-04:00
custom:
  Author: peterallenwebb
  Issue: "7963"

.changes/1.7.0/Dependencies-20230621-005752.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: "Dependencies"
body: "Bump mypy from 1.3.0 to 1.4.0"
time: 2023-06-21T00:57:52.00000Z
custom:
  Author: dependabot[bot]
  PR: 7912

.changes/1.7.0/Dependencies-20230726-201740.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: "Dependencies"
body: "Bump mypy from 1.4.0 to 1.4.1"
time: 2023-07-26T20:17:40.00000Z
custom:
  Author: dependabot[bot]
  PR: 8219

.changes/1.7.0/Dependencies-20230727-145703.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Dependencies
body: Update pin for click<9
time: 2023-07-27T14:57:03.180458-05:00
custom:
  Author: emmyoop
  PR: "8232"

.changes/1.7.0/Dependencies-20230727-145726.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Dependencies
body: Add upper bound to sqlparse pin of <0.5
time: 2023-07-27T14:57:26.40416-05:00
custom:
  Author: emmyoop
  PR: "8236"

.changes/1.7.0/Dependencies-20230728-135227.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Dependencies
body: Support dbt-semantic-interfaces 0.2.0
time: 2023-07-28T13:52:27.207241-07:00
custom:
  Author: QMalcolm
  PR: "8250"

.changes/1.7.0/Docs-20230715-200907.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Docs
body: Corrected spelling of "Partiton"
time: 2023-07-15T20:09:07.057361092+02:00
custom:
  Author: pgoslatara
  Issue: "8100"

.changes/1.7.0/Docs-20230727-170900.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Docs
body: fixed comment util.py
time: 2023-07-27T17:09:00.089237+09:00
custom:
  Author: d-kaneshiro
  Issue: None

.changes/1.7.0/Docs-20230804-131815.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Docs
body: Display contract and column constraints on the model page
time: 2023-08-04T13:18:15.627005-05:00
custom:
  Author: emmyoop
  Issue: "433"

.changes/1.7.0/Docs-20230807-152548.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Docs
body: Display semantic model details in docs
time: 2023-08-07T15:25:48.711627-05:00
custom:
  Author: emmyoop
  Issue: "431"

.changes/1.7.0/Features-20230702-122813.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Features
body: Enable re-population of metadata vars post-environment change during programmatic
  invocation
time: 2023-07-02T12:28:13.416305-04:00
custom:
  Author: gem7318
  Issue: "8010"

.changes/1.7.0/Features-20230714-202445.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Features
body: Added support to configure a delimiter for a seed file, defaults to comma
time: 2023-07-14T20:24:45.513847165+02:00
custom:
  Author: ramonvermeulen
  Issue: "3990"

.changes/1.7.0/Features-20230803-151824.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Features
body: 'Allow specification of `create_metric: true` on measures'
time: 2023-08-03T15:18:24.351003-07:00
custom:
  Author: QMalcolm
  Issue: "8125"

.changes/1.7.0/Fixes-20230424-210734.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Copy dir during `dbt deps` if symlink fails
time: 2023-04-24T21:07:34.336797+05:30
custom:
  Author: anjutiwari
  Issue: "7428 8223"

.changes/1.7.0/Fixes-20230625-142731.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Fixed double-underline
time: 2023-06-25T14:27:31.231253719+08:00
custom:
  Author: lllong33
  Issue: "5301"

.changes/1.7.0/Fixes-20230717-160652.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Copy target_schema from config into snapshot node
time: 2023-07-17T16:06:52.957724-04:00
custom:
  Author: gshank
  Issue: "6745"

.changes/1.7.0/Fixes-20230720-122723.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Add status to Parse Inline Error
time: 2023-07-20T12:27:23.085084-07:00
custom:
  Author: ChenyuLInx
  Issue: "8173"

.changes/1.7.0/Fixes-20230720-161513.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Ensure `warn_error_options` get serialized in `invocation_args_dict`
time: 2023-07-20T16:15:13.761813-07:00
custom:
  Author: QMalcolm
  Issue: "7694"

.changes/1.7.0/Fixes-20230720-170112.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Stop detecting materialization macros based on macro name
time: 2023-07-20T17:01:12.496238-07:00
custom:
  Author: QMalcolm
  Issue: "6231"

.changes/1.7.0/Fixes-20230720-172422.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Update `dbt deps` download retry logic to handle `EOFError` exceptions
time: 2023-07-20T17:24:22.969951-07:00
custom:
  Author: QMalcolm
  Issue: "6653"

.changes/1.7.0/Fixes-20230726-104448.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Improve handling of CTE injection with ephemeral models
time: 2023-07-26T10:44:48.888451-04:00
custom:
  Author: gshank
  Issue: "8213"

.changes/1.7.0/Fixes-20230727-125830.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Fix unbound local variable error in `checked_agg_time_dimension_for_measure`
time: 2023-07-27T12:58:30.673803-07:00
custom:
  Author: QMalcolm
  Issue: "8230"

.changes/1.7.0/Fixes-20230728-115620.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Fixes
body: Ensure runtime errors are raised for graph runnable tasks (compile, show, run,
  etc)
time: 2023-07-28T11:56:20.863718-04:00
custom:
  Author: michelleark
  Issue: "8166"

.changes/1.7.0/Fixes-20230802-141556.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Fix retry not working with log-file-max-bytes
time: 2023-08-02T14:15:56.306027-07:00
custom:
  Author: ChenyuLInx
  Issue: "8297"

.changes/1.7.0/Fixes-20230806-222319.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Detect changes to model access, version, or latest_version in state:modified
time: 2023-08-06T22:23:19.166334-04:00
custom:
  Author: michelleark
  Issue: "8189"

.changes/1.7.0/Fixes-20230810-184859.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Add connection status into list of statuses for dbt debug
time: 2023-08-10T18:48:59.221344+01:00
custom:
  Author: aranke
  Issue: "8350"

.changes/1.7.0/Fixes-20230811-204144.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: fix fqn-selection for external versioned models
time: 2023-08-11T20:41:44.725144-04:00
custom:
  Author: michelleark
  Issue: "8374"

.changes/1.7.0/Fixes-20230811-212008.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Fixes
body: 'Fix: DbtInternalError after model that previously ref''d external model is
  deleted'
time: 2023-08-11T21:20:08.145554-04:00
custom:
  Author: michelleark
  Issue: "8375"

.changes/1.7.0/Fixes-20230814-145702.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Fix using list command with path selector and project-dir
time: 2023-08-14T14:57:02.02816-04:00
custom:
  Author: gshank
  Issue: "8385"

.changes/1.7.0/Fixes-20230815-104444.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Remedy performance regression by only writing run_results.json once.
time: 2023-08-15T10:44:44.836991-04:00
custom:
  Author: peterallenwebb
  Issue: "8360"

.changes/1.7.0/Fixes-20230817-130915.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Use python version 3.10.7 in Docker image.
time: 2023-08-17T13:09:15.936349-05:00
custom:
  Author: McKnight-42
  Issue: "8444"

.changes/1.7.0/Under the Hood-20230719-124611.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Refactor flaky test pp_versioned_models
time: 2023-07-19T12:46:11.972481-04:00
custom:
  Author: gshank
  Issue: "7781"

.changes/1.7.0/Under the Hood-20230719-163334.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: format exception from dbtPlugin.initialize
time: 2023-07-19T16:33:34.586377-04:00
custom:
  Author: michelleark
  Issue: "8152"

.changes/1.7.0/Under the Hood-20230724-150654.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: A way to control maxBytes for a single dbt.log file
time: 2023-07-24T15:06:54.263822-07:00
custom:
  Author: ChenyuLInx
  Issue: "8199"

.changes/1.7.0/Under the Hood-20230725-102609.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Under the Hood
body: Ref expressions with version can now be processed by the latest version of the
  high-performance dbt-extractor library.
time: 2023-07-25T10:26:09.902878-04:00
custom:
  Author: peterallenwebb
  Issue: "7688"

.changes/1.7.0/Under the Hood-20230807-164509.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Bump manifest schema version to v11, freeze manifest v10
time: 2023-08-07T16:45:09.712744-04:00
custom:
  Author: gshank
  Issue: "8333"

.changes/1.7.0/Under the Hood-20230809-094834.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: add tracking for plugin.get_nodes calls
time: 2023-08-09T09:48:34.819445-04:00
custom:
  Author: michelleark
  Issue: "8344"

.changes/1.7.0/Under the Hood-20230811-100902.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Under the Hood
body: 'add internal flag: --no-partial-parse-file-diff to inform whether to compute
  a file diff during partial parsing'
time: 2023-08-11T10:09:02.832241-04:00
custom:
  Author: michelleark
  Issue: "8363"

.changes/1.7.0/Under the Hood-20230815-170307.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Add return values to a number of functions for mypy
time: 2023-08-15T17:03:07.895252-04:00
custom:
  Author: gshank
  Issue: "8389"

.changes/1.7.0/Under the Hood-20230817-134548.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Fix mypy warnings for ManifestLoader.load()
time: 2023-08-17T13:45:48.937252-04:00
custom:
  Author: gshank
  Issue: "8401"

(deleted file)
@@ -1,6 +0,0 @@
-kind: Docs
-body: Fix for column tests not rendering on quoted columns
-time: 2023-05-31T11:54:19.687363-04:00
-custom:
-  Author: drewbanin
-  Issue: "201"

.changes/unreleased/Docs-20230728-193438.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Docs
body: Fix newline escapes and improve formatting in docker README
time: 2023-07-28T19:34:38.351042747+02:00
custom:
  Author: jamezrin
  Issue: "8211"

.changes/unreleased/Fixes-20230803-093502.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Add explicit support for integers for the show command
time: 2023-08-03T09:35:02.163968-05:00
custom:
  Author: dave-connors-3
  Issue: "8153"

.changes/unreleased/Fixes-20230810-183216.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Call materialization macro from adapter dispatch
time: 2023-08-10T18:32:16.226142+01:00
custom:
  Author: aranke
  Issue: "8031"

.changes/unreleased/Fixes-20230818-095348.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Ensure parsing does not break when `window_groupings` is not specified for `non_additive_dimension`
time: 2023-08-18T09:53:48.154848-07:00
custom:
  Author: QMalcolm
  Issue: "8453"
.github/ISSUE_TEMPLATE/implementation-ticket.yml (vendored, new file, 47 lines)
@@ -0,0 +1,47 @@
name: 🛠️ Implementation
description: This is an implementation ticket intended for use by the maintainers of dbt-core
title: "[<project>] <title>"
labels: ["user_docs"]
body:
  - type: markdown
    attributes:
      value: This is an implementation ticket intended for use by the maintainers of dbt-core
  - type: checkboxes
    attributes:
      label: Housekeeping
      description: >
        A couple friendly reminders:
        1. Remove the `user_docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
        2. Link any blocking issues in the "Blocked on" field under the "Core devs & maintainers" project.
      options:
        - label: I am a maintainer of dbt-core
          required: true
  - type: textarea
    attributes:
      label: Short description
      description: |
        Describe the scope of the ticket, a high-level implementation approach and any tradeoffs to consider
    validations:
      required: true
  - type: textarea
    attributes:
      label: Acceptance criteria
      description: |
        What is the definition of done for this ticket? Include any relevant edge cases and/or test cases
    validations:
      required: true
  - type: textarea
    attributes:
      label: Impact to Adapters
      description: |
        Will this require changes to adapter repos? Include details of the kinds of changes required (new tests, code changes, related tickets)
    validations:
      required: true
  - type: textarea
    attributes:
      label: Context
      description: |
        Provide the "why", motivation, and alternative approaches considered -- linking to previous refinement issues, spikes, Notion docs as appropriate
    validations:
      required: false
.github/workflows/docs-issue.yml (vendored, new file, 37 lines)
@@ -0,0 +1,37 @@
# **what?**
# Open an issue in docs.getdbt.com when a PR is labeled `user docs`

# **why?**
# To reduce barriers for keeping docs up to date

# **when?**
# When a PR is labeled `user docs` and is merged. Runs on pull_request_target to run off the workflow already merged,
# not the workflow that existed on the PR branch. This allows old PRs to get comments.


name: Open issues in docs.getdbt.com repo when a PR is labeled
run-name: "Open an issue in docs.getdbt.com for PR #${{ github.event.pull_request.number }}"

on:
  pull_request_target:
    types: [labeled, closed]

defaults:
  run:
    shell: bash

permissions:
  issues: write # opens new issues
  pull-requests: write # comments on PRs


jobs:
  open_issues:
    if: contains( github.event.pull_request.labels.*.name, 'user docs') && github.event.pull_request.merged == true
    uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
    with:
      issue_repository: "dbt-labs/docs.getdbt.com"
      issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} PR #${{ github.event.pull_request.number }}"
      issue_labels: "content,improvement,dbt Core"
      issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated."
    secrets: inherit
.github/workflows/main.yml (vendored, 84 changed lines)
@@ -33,6 +33,11 @@ defaults:
   run:
     shell: bash

+# top-level adjustments can be made here
+env:
+  # number of parallel processes to spawn for python integration testing
+  PYTHON_INTEGRATION_TEST_WORKERS: 5
+
 jobs:
   code-quality:
     name: code-quality
@@ -103,26 +108,59 @@ jobs:
       - name: Upload Unit Test Coverage to Codecov
         if: ${{ matrix.python-version == '3.11' }}
         uses: codecov/codecov-action@v3
+        env:
+          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
-          token: ${{ secrets.CODECOV_TOKEN }}
           flags: unit

+  integration-metadata:
+    name: integration test metadata generation
+    runs-on: ubuntu-latest
+    outputs:
+      split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
+      include: ${{ steps.generate-include.outputs.include }}
+
+    steps:
+      - name: generate split-groups
+        id: generate-split-groups
+        run: |
+          MATRIX_JSON="["
+          for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+            MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
+          done
+          MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
+          MATRIX_JSON+="]"
+          echo "split-groups=${MATRIX_JSON}"
+          echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
+
+      - name: generate include
+        id: generate-include
+        run: |
+          INCLUDE=('"python-version":"3.8","os":"windows-latest"' '"python-version":"3.8","os":"macos-latest"' )
+          INCLUDE_GROUPS="["
+          for include in ${INCLUDE[@]}; do
+            for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+              INCLUDE_GROUPS+=$(sed 's/$/, /' <<< "{\"split-group\":\"${group}\",${include}}")
+            done
+          done
+          INCLUDE_GROUPS=$(echo $INCLUDE_GROUPS | sed 's/,*$//g')
+          INCLUDE_GROUPS+="]"
+          echo "include=${INCLUDE_GROUPS}"
+          echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT
+
   integration:
-    name: integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
+    name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}

     runs-on: ${{ matrix.os }}
-    timeout-minutes: 60
+    timeout-minutes: 30
+    needs:
+      - integration-metadata
     strategy:
       fail-fast: false
       matrix:
         python-version: ["3.8", "3.9", "3.10", "3.11"]
         os: [ubuntu-20.04]
-        include:
-          - python-version: 3.8
-            os: windows-latest
-          - python-version: 3.8
-            os: macos-latest
+        split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
+        include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
     env:
       TOXENV: integration
       DBT_INVOCATION_ENV: github-actions
@@ -165,6 +203,8 @@ jobs:

       - name: Run tests
         run: tox -- --ddtrace
+        env:
+          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}

       - name: Get current date
         if: always()
@@ -182,8 +222,26 @@ jobs:
       - name: Upload Integration Test Coverage to Codecov
         if: ${{ matrix.python-version == '3.11' }}
         uses: codecov/codecov-action@v3
+        env:
+          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
-          token: ${{ secrets.CODECOV_TOKEN }}
           flags: integration

+  integration-report:
+    if: ${{ always() }}
+    name: Integration Test Suite
+    runs-on: ubuntu-latest
+    needs: integration
+    steps:
+      - name: "Integration Tests Failed"
+        if: ${{ contains(needs.integration.result, 'failure') || contains(needs.integration.result, 'cancelled') }}
+        # when this is true the next step won't execute
+        run: |
+          echo "::notice title='Integration test suite failed'"
+          exit 1
+
+      - name: "Integration Tests Passed"
+        run: |
+          echo "::notice title='Integration test suite passed'"
+
   build:
     name: build packages
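Note: the two `run` blocks above assemble the job-matrix JSON strings by hand in bash. A minimal Python sketch of what they produce, assuming the default of 5 workers and the two extra OS entries (not part of the workflow itself, just an illustration):

```python
import json

workers = 5  # PYTHON_INTEGRATION_TEST_WORKERS

# "generate split-groups": one matrix entry per worker
split_groups = json.dumps([str(n) for n in range(1, workers + 1)])
# -> ["1", "2", "3", "4", "5"]

# "generate include": every (extra OS) x (split group) combination
extras = [
    {"python-version": "3.8", "os": "windows-latest"},
    {"python-version": "3.8", "os": "macos-latest"},
]
include = json.dumps(
    [{"split-group": str(g), **extra} for extra in extras for g in range(1, workers + 1)]
)
print(split_groups)
print(include)
```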
@@ -18,11 +18,41 @@ on:

 permissions: read-all

+# top-level adjustments can be made here
+env:
+  # number of parallel processes to spawn for python testing
+  PYTHON_INTEGRATION_TEST_WORKERS: 5
+
 jobs:
+  integration-metadata:
+    name: integration test metadata generation
+    runs-on: ubuntu-latest
+    outputs:
+      split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
+
+    steps:
+      - name: generate split-groups
+        id: generate-split-groups
+        run: |
+          MATRIX_JSON="["
+          for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+            MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
+          done
+          MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
+          MATRIX_JSON+="]"
+          echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
+
   # run the performance measurements on the current or default branch
   test-schema:
     name: Test Log Schema
     runs-on: ubuntu-20.04
     timeout-minutes: 30
+    needs:
+      - integration-metadata
     strategy:
       fail-fast: false
+      matrix:
+        split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
     env:
       # turns warnings into errors
       RUSTFLAGS: "-D warnings"
@@ -65,3 +95,14 @@ jobs:
       # we actually care if these pass, because the normal test run doesn't usually include many json log outputs
       - name: Run integration tests
         run: tox -e integration -- -nauto
+        env:
+          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
+
+  test-schema-report:
+    name: Log Schema Test Suite
+    runs-on: ubuntu-latest
+    needs: test-schema
+    steps:
+      - name: "[Notification] Log test suite passes"
+        run: |
+          echo "::notice title="Log test suite passes""
.pre-commit-config.yaml
@@ -37,7 +37,7 @@ repos:
     alias: flake8-check
     stages: [manual]
 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v1.3.0
+  rev: v1.4.1
   hooks:
     - id: mypy
       # N.B.: Mypy is... a bit fragile.
CHANGELOG.md (72 changed lines)
@@ -5,6 +5,78 @@
 - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
 - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)

+## dbt-core 1.7.0-b1 - August 17, 2023
+
+### Breaking Changes
+
+- Removed the FirstRunResultError and AfterFirstRunResultError event types, using the existing RunResultError in their place. ([#7963](https://github.com/dbt-labs/dbt-core/issues/7963))
+
+### Features
+
+- Enable re-population of metadata vars post-environment change during programmatic invocation ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
+- Added support to configure a delimiter for a seed file, defaults to comma ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
+- Allow specification of `create_metric: true` on measures ([#8125](https://github.com/dbt-labs/dbt-core/issues/8125))
+
+### Fixes
+
+- Copy dir during `dbt deps` if symlink fails ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
+- Fixed double-underline ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
+- Copy target_schema from config into snapshot node ([#6745](https://github.com/dbt-labs/dbt-core/issues/6745))
+- Enable converting deprecation warnings to errors ([#8130](https://github.com/dbt-labs/dbt-core/issues/8130))
+- Add status to Parse Inline Error ([#8173](https://github.com/dbt-labs/dbt-core/issues/8173))
+- Ensure `warn_error_options` get serialized in `invocation_args_dict` ([#7694](https://github.com/dbt-labs/dbt-core/issues/7694))
+- Stop detecting materialization macros based on macro name ([#6231](https://github.com/dbt-labs/dbt-core/issues/6231))
+- Update `dbt deps` download retry logic to handle `EOFError` exceptions ([#6653](https://github.com/dbt-labs/dbt-core/issues/6653))
+- Improve handling of CTE injection with ephemeral models ([#8213](https://github.com/dbt-labs/dbt-core/issues/8213))
+- Fix unbound local variable error in `checked_agg_time_dimension_for_measure` ([#8230](https://github.com/dbt-labs/dbt-core/issues/8230))
+- Ensure runtime errors are raised for graph runnable tasks (compile, show, run, etc) ([#8166](https://github.com/dbt-labs/dbt-core/issues/8166))
+- Fix retry not working with log-file-max-bytes ([#8297](https://github.com/dbt-labs/dbt-core/issues/8297))
+- Detect changes to model access, version, or latest_version in state:modified ([#8189](https://github.com/dbt-labs/dbt-core/issues/8189))
+- Add connection status into list of statuses for dbt debug ([#8350](https://github.com/dbt-labs/dbt-core/issues/8350))
+- fix fqn-selection for external versioned models ([#8374](https://github.com/dbt-labs/dbt-core/issues/8374))
+- Fix: DbtInternalError after model that previously ref'd external model is deleted ([#8375](https://github.com/dbt-labs/dbt-core/issues/8375))
+- Fix using list command with path selector and project-dir ([#8385](https://github.com/dbt-labs/dbt-core/issues/8385))
+- Remedy performance regression by only writing run_results.json once. ([#8360](https://github.com/dbt-labs/dbt-core/issues/8360))
+
+### Docs
+
+- Corrected spelling of "Partiton" ([dbt-docs/#8100](https://github.com/dbt-labs/dbt-docs/issues/8100))
+- Remove static SQL codeblock for metrics ([dbt-docs/#436](https://github.com/dbt-labs/dbt-docs/issues/436))
+- fixed comment util.py ([dbt-docs/#None](https://github.com/dbt-labs/dbt-docs/issues/None))
+- Display contract and column constraints on the model page ([dbt-docs/#433](https://github.com/dbt-labs/dbt-docs/issues/433))
+- Display semantic model details in docs ([dbt-docs/#431](https://github.com/dbt-labs/dbt-docs/issues/431))
+
+### Under the Hood
+
+- Refactor flaky test pp_versioned_models ([#7781](https://github.com/dbt-labs/dbt-core/issues/7781))
+- format exception from dbtPlugin.initialize ([#8152](https://github.com/dbt-labs/dbt-core/issues/8152))
+- A way to control maxBytes for a single dbt.log file ([#8199](https://github.com/dbt-labs/dbt-core/issues/8199))
+- Ref expressions with version can now be processed by the latest version of the high-performance dbt-extractor library. ([#7688](https://github.com/dbt-labs/dbt-core/issues/7688))
+- Bump manifest schema version to v11, freeze manifest v10 ([#8333](https://github.com/dbt-labs/dbt-core/issues/8333))
+- add tracking for plugin.get_nodes calls ([#8344](https://github.com/dbt-labs/dbt-core/issues/8344))
+- add internal flag: --no-partial-parse-file-diff to inform whether to compute a file diff during partial parsing ([#8363](https://github.com/dbt-labs/dbt-core/issues/8363))
+- Add return values to a number of functions for mypy ([#8389](https://github.com/dbt-labs/dbt-core/issues/8389))
+- Fix mypy warnings for ManifestLoader.load() ([#8401](https://github.com/dbt-labs/dbt-core/issues/8401))
+- Use python version 3.10.7 in Docker image. ([#8444](https://github.com/dbt-labs/dbt-core/issues/8444))
+
+### Dependencies
+
+- Bump mypy from 1.3.0 to 1.4.0 ([#7912](https://github.com/dbt-labs/dbt-core/pull/7912))
+- Bump mypy from 1.4.0 to 1.4.1 ([#8219](https://github.com/dbt-labs/dbt-core/pull/8219))
+- Update pin for click<9 ([#8232](https://github.com/dbt-labs/dbt-core/pull/8232))
+- Add upper bound to sqlparse pin of <0.5 ([#8236](https://github.com/dbt-labs/dbt-core/pull/8236))
+- Support dbt-semantic-interfaces 0.2.0 ([#8250](https://github.com/dbt-labs/dbt-core/pull/8250))
+
+### Contributors
+- [@anjutiwari](https://github.com/anjutiwari) ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
+- [@d-kaneshiro](https://github.com/d-kaneshiro) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
+- [@gem7318](https://github.com/gem7318) ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
+- [@lllong33](https://github.com/lllong33) ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
+- [@marcodamore](https://github.com/marcodamore) ([#436](https://github.com/dbt-labs/dbt-core/issues/436))
+- [@pgoslatara](https://github.com/pgoslatara) ([#8100](https://github.com/dbt-labs/dbt-core/issues/8100))
+- [@ramonvermeulen](https://github.com/ramonvermeulen) ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
+
+
 ## Previous Releases

 For information on prior major and minor releases, see their changelogs:
codecov.yml (new file, 9 lines)
@@ -0,0 +1,9 @@
ignore:
  - ".github"
  - ".changes"
coverage:
  status:
    project:
      default:
        target: auto
        threshold: 0.01% # Reduce noise by ignoring rounding errors in coverage drops
core/dbt/adapters/base/query_headers.py
@@ -25,9 +25,9 @@ class _QueryComment(local):
     - a source_name indicating what set the current thread's query comment
     """

-    def __init__(self, initial):
+    def __init__(self, initial) -> None:
         self.query_comment: Optional[str] = initial
-        self.append = False
+        self.append: bool = False

     def add(self, sql: str) -> str:
         if not self.query_comment:
core/dbt/cli/flags.py
@@ -61,7 +61,6 @@ def args_to_context(args: List[str]) -> Context:
     if len(args) == 1 and "," in args[0]:
         args = args[0].split(",")
     sub_command_name, sub_command, args = cli.resolve_command(cli_ctx, args)
-
     # Handle source and docs group.
     if isinstance(sub_command, Group):
         sub_command_name, sub_command, args = sub_command.resolve_command(cli_ctx, args)
@@ -319,7 +318,6 @@ def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandPar

     for k, v in args_dict.items():
         k = k.lower()
-
         # if a "which" value exists in the args dict, it should match the command provided
         if k == WHICH_KEY:
             if v != command.value:
@@ -344,7 +342,8 @@ def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandPar

         if k == "macro" and command == CliCommand.RUN_OPERATION:
             add_fn(v)
-        elif v in (None, False):
+        # None is a Singleton, False is a Flyweight, only one instance of each.
+        elif v is None or v is False:
             add_fn(f"--no-{spinal_cased}")
         elif v is True:
             add_fn(f"--{spinal_cased}")
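Note: the `elif v in (None, False):` branch being replaced is subtly wrong for values that merely *compare equal* to `False`; identity checks keep them distinct. A quick illustration in plain Python (independent of dbt):

```python
v = 0  # a perfectly valid user-supplied value

# Membership uses ==, so the integer 0 matches False and would be
# rendered as a --no-<flag> even though it is a real value.
assert v in (None, False)

# Identity checks distinguish 0 from the False singleton.
assert not (v is None or v is False)
```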
core/dbt/cli/main.py
@@ -132,6 +132,7 @@ class dbtRunner:
 @p.enable_legacy_logger
 @p.fail_fast
 @p.log_cache_events
+@p.log_file_max_bytes
 @p.log_format
 @p.log_format_file
 @p.log_level
@@ -140,6 +141,7 @@ class dbtRunner:
 @p.macro_debugging
 @p.partial_parse
 @p.partial_parse_file_path
+@p.partial_parse_file_diff
 @p.populate_cache
 @p.print
 @p.printer_width
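Note: both new options are hidden CLI flags, so they can also be passed through the programmatic entry point. A minimal sketch of such an invocation (assuming a dbt project in the current directory):

```python
from dbt.cli.main import dbtRunner

# Cap each dbt.log file at ~1 MiB before rolling over (0 would disable the cap),
# and skip the file diff during partial parsing.
res = dbtRunner().invoke(
    ["run", "--log-file-max-bytes", str(1024 * 1024), "--no-partial-parse-file-diff"]
)
print(res.success)
```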
core/dbt/cli/params.py
@@ -171,6 +171,15 @@ use_colors_file = click.option(
     default=True,
 )

+log_file_max_bytes = click.option(
+    "--log-file-max-bytes",
+    envvar="DBT_LOG_FILE_MAX_BYTES",
+    help="Configure the max file size in bytes for a single dbt.log file, before rolling over. 0 means no limit.",
+    default=10 * 1024 * 1024,  # 10mb
+    type=click.INT,
+    hidden=True,
+)
+
 log_path = click.option(
     "--log-path",
     envvar="DBT_LOG_PATH",
@@ -248,6 +257,14 @@ partial_parse_file_path = click.option(
     type=click.Path(exists=True, dir_okay=False, resolve_path=True),
 )

+partial_parse_file_diff = click.option(
+    "--partial-parse-file-diff/--no-partial-parse-file-diff",
+    envvar="DBT_PARTIAL_PARSE_FILE_DIFF",
+    help="Internal flag for whether to compute a file diff during partial parsing.",
+    hidden=True,
+    default=True,
+)
+
 populate_cache = click.option(
     "--populate-cache/--no-populate-cache",
     envvar="DBT_POPULATE_CACHE",
@@ -380,9 +397,9 @@ inline = click.option(
 # Most CLI arguments should use the combined `select` option that aliases `--models` to `--select`.
 # However, if you need to split out these separators (like `dbt ls`), use the `models` and `raw_select` options instead.
 # See https://github.com/dbt-labs/dbt-core/pull/6774#issuecomment-1408476095 for more info.
-models = click.option(*model_decls, **select_attrs)
-raw_select = click.option(*select_decls, **select_attrs)
-select = click.option(*select_decls, *model_decls, **select_attrs)
+models = click.option(*model_decls, **select_attrs)  # type: ignore[arg-type]
+raw_select = click.option(*select_decls, **select_attrs)  # type: ignore[arg-type]
+select = click.option(*select_decls, *model_decls, **select_attrs)  # type: ignore[arg-type]

 selector = click.option(
     "--selector",
core/dbt/clients/agate_helper.py
@@ -9,10 +9,20 @@ from typing import Iterable, List, Dict, Union, Optional, Any

 from dbt.exceptions import DbtRuntimeError


 BOM = BOM_UTF8.decode("utf-8")  # '\ufeff'


+class Integer(agate.data_types.DataType):
+    def cast(self, d):
+        if type(d) == int:
+            return d
+        else:
+            raise agate.exceptions.CastError('Can not parse value "%s" as Integer.' % d)
+
+    def jsonify(self, d):
+        return d
+
+
 class Number(agate.data_types.Number):
     # undo the change in https://github.com/wireservice/agate/pull/733
     # i.e. do not cast True and False to numeric 1 and 0
@@ -48,6 +58,7 @@ def build_type_tester(
 ) -> agate.TypeTester:

     types = [
+        Integer(null_values=("null", "")),
         Number(null_values=("null", "")),
         agate.data_types.Date(null_values=("null", ""), date_format="%Y-%m-%d"),
         agate.data_types.DateTime(null_values=("null", ""), datetime_format="%Y-%m-%d %H:%M:%S"),
@@ -135,12 +146,12 @@ def as_matrix(table):
     return [r.values() for r in table.rows.values()]


-def from_csv(abspath, text_columns):
+def from_csv(abspath, text_columns, delimiter=","):
     type_tester = build_type_tester(text_columns=text_columns)
     with open(abspath, encoding="utf-8") as fp:
         if fp.read(1) != BOM:
             fp.seek(0)
-        return agate.Table.from_csv(fp, column_types=type_tester)
+        return agate.Table.from_csv(fp, column_types=type_tester, delimiter=delimiter)


 class _NullMarker:
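Note: the new `delimiter` argument is simply forwarded to agate, which passes it on to Python's csv reader. A small sketch using agate directly, with a hypothetical semicolon-separated `seed.csv`:

```python
import agate

# agate forwards delimiter to csv.reader, so ";" splits the columns here
with open("seed.csv", encoding="utf-8") as fp:
    table = agate.Table.from_csv(fp, delimiter=";")
print(table.column_names)
```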
core/dbt/clients/jinja.py
@@ -191,7 +191,7 @@ NativeSandboxEnvironment.template_class = NativeSandboxTemplate  # type: ignore


 class TemplateCache:
-    def __init__(self):
+    def __init__(self) -> None:
         self.file_cache: Dict[str, jinja2.Template] = {}

     def get_node_template(self, node) -> jinja2.Template:
@@ -271,10 +271,11 @@ class MacroStack(threading.local):
     def push(self, name):
         self.call_stack.append(name)

-    def pop(self, name):
+    def pop(self, name: Optional[str] = None):
         got = self.call_stack.pop()
-        if got != name:
+        if name and got != name:
             raise DbtInternalError(f"popped {got}, expected {name}")
         return got


 class MacroGenerator(BaseMacroGenerator):
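Note: `MacroStack.pop` now tolerates being called without a name; the sanity check only fires when a name is supplied. A minimal sketch (assuming a dbt installation, since `MacroStack` lives in `dbt.clients.jinja`):

```python
from dbt.clients.jinja import MacroStack

stack = MacroStack()
stack.push("my_macro")
stack.pop("my_macro")  # checked pop: raises DbtInternalError on a mismatch
stack.push("other_macro")
stack.pop()            # unchecked pop: newly allowed by this change
```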
core/dbt/compilation.py
@@ -4,7 +4,6 @@ import json
 import networkx as nx  # type: ignore
 import os
 import pickle
-import sqlparse

 from collections import defaultdict
 from typing import List, Dict, Any, Tuple, Optional
@@ -36,6 +35,7 @@ from dbt.node_types import NodeType, ModelLanguage
 from dbt.events.format import pluralize
 import dbt.tracking
 import dbt.task.list as list_task
+import sqlparse

 graph_file_name = "graph.gpickle"
@@ -378,16 +378,16 @@ class Compiler:

             _add_prepended_cte(prepended_ctes, InjectedCTE(id=cte.id, sql=sql))

-        injected_sql = inject_ctes_into_sql(
-            model.compiled_code,
-            prepended_ctes,
-        )
-        model._pre_injected_sql = model.compiled_code
-        model.compiled_code = injected_sql
-        model.extra_ctes = prepended_ctes
-        model.extra_ctes_injected = True
+        # Check again before updating for multi-threading
+        if not model.extra_ctes_injected:
+            injected_sql = inject_ctes_into_sql(
+                model.compiled_code,
+                prepended_ctes,
+            )
+            model.extra_ctes_injected = True
+            model._pre_injected_sql = model.compiled_code
+            model.compiled_code = injected_sql
+            model.extra_ctes = prepended_ctes

         # if model.extra_ctes is not set to prepended ctes, something went wrong
         return model, model.extra_ctes
@@ -523,6 +523,12 @@ class Compiler:
         the node's raw_code into compiled_code, and then calls the
         recursive method to "prepend" the ctes.
         """
+        # Make sure Lexer for sqlparse 0.4.4 is initialized
+        from sqlparse.lexer import Lexer  # type: ignore
+
+        if hasattr(Lexer, "get_default_instance"):
+            Lexer.get_default_instance()
+
         node = self._compile_code(node, manifest, extra_context)

         node, _ = self._recursively_prepend_ctes(node, manifest, extra_context)
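Note: re-checking `extra_ctes_injected` immediately before mutating is a check-twice guard: a thread that lost the race is very likely to see the flag already flipped and skip the rewrite. A toy sketch of the pattern (hypothetical dict-based "model", not dbt's classes; it narrows rather than fully closes the race window):

```python
import threading

model = {"extra_ctes_injected": False, "compiled_code": "select 1"}

def inject_once(model):
    # Second check right before mutating, in case another thread
    # finished the injection after our first check.
    if not model["extra_ctes_injected"]:
        model["extra_ctes_injected"] = True
        model["compiled_code"] = "with cte as (select 2) " + model["compiled_code"]

threads = [threading.Thread(target=inject_once, args=(model,)) for _ in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()
print(model["compiled_code"])
```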
core/dbt/context/macro_resolver.py
@@ -40,7 +40,7 @@ class MacroResolver:
         self._build_internal_packages_namespace()
         self._build_macros_by_name()

-    def _build_internal_packages_namespace(self):
+    def _build_internal_packages_namespace(self) -> None:
         # Iterate in reverse-order and overwrite: the packages that are first
         # in the list are the ones we want to "win".
         self.internal_packages_namespace: MacroNamespace = {}
@@ -56,7 +56,7 @@ class MacroResolver:
     # root package namespace
     # non-internal packages (that aren't local or root)
     # dbt internal packages
-    def _build_macros_by_name(self):
+    def _build_macros_by_name(self) -> None:
         macros_by_name = {}

         # all internal packages (already in the right order)
@@ -78,7 +78,7 @@ class MacroResolver:
         self,
         package_namespaces: Dict[str, MacroNamespace],
         macro: Macro,
-    ):
+    ) -> None:
         if macro.package_name in package_namespaces:
             namespace = package_namespaces[macro.package_name]
         else:
@@ -89,7 +89,7 @@ class MacroResolver:
             raise DuplicateMacroNameError(macro, macro, macro.package_name)
         package_namespaces[macro.package_name][macro.name] = macro

-    def add_macro(self, macro: Macro):
+    def add_macro(self, macro: Macro) -> None:
         macro_name: str = macro.name

         # internal macros (from plugins) will be processed separately from
@@ -103,11 +103,11 @@ class MacroResolver:
         if macro.package_name == self.root_project_name:
             self.root_package_macros[macro_name] = macro

-    def add_macros(self):
+    def add_macros(self) -> None:
         for macro in self.macros.values():
             self.add_macro(macro)

-    def get_macro(self, local_package, macro_name):
+    def get_macro(self, local_package, macro_name) -> Optional[Macro]:
         local_package_macros = {}
         # If the macro is explicitly prefixed with an internal namespace
         # (e.g. 'dbt.some_macro'), look there first
@@ -125,7 +125,7 @@ class MacroResolver:
             return self.macros_by_name[macro_name]
         return None

-    def get_macro_id(self, local_package, macro_name):
+    def get_macro_id(self, local_package, macro_name) -> Optional[str]:
         macro = self.get_macro(local_package, macro_name)
         if macro is None:
             return None
core/dbt/context/providers.py
@@ -11,13 +11,18 @@ from typing import (
     Type,
     Iterable,
     Mapping,
+    Tuple,
 )

+import agate
 from typing_extensions import Protocol

+from dbt import selected_resources
 from dbt.adapters.base.column import Column
 from dbt.adapters.factory import get_adapter, get_adapter_package_names, get_adapter_type_names
 from dbt.clients import agate_helper
 from dbt.clients.jinja import get_rendered, MacroGenerator, MacroStack
+from dbt.config import IsFQNResource
 from dbt.config import RuntimeConfig, Project
 from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER
 from dbt.context.base import contextmember, contextproperty, Var
@@ -29,6 +34,7 @@ from dbt.context.macros import MacroNamespaceBuilder, MacroNamespace
 from dbt.context.manifest import ManifestContext
 from dbt.contracts.connection import AdapterResponse
 from dbt.contracts.graph.manifest import Manifest, Disabled
+from dbt.contracts.graph.metrics import MetricReference, ResolvedMetricReference
 from dbt.contracts.graph.nodes import (
     Macro,
     Exposure,
@@ -40,7 +46,6 @@ from dbt.contracts.graph.nodes import (
     AccessType,
     SemanticModel,
 )
-from dbt.contracts.graph.metrics import MetricReference, ResolvedMetricReference
 from dbt.contracts.graph.unparsed import NodeVersion
 from dbt.events.functions import get_metadata_vars
 from dbt.exceptions import (
@@ -69,16 +74,9 @@ from dbt.exceptions import (
     DbtValidationError,
     DbtReferenceError,
 )
-from dbt.config import IsFQNResource
 from dbt.node_types import NodeType, ModelLanguage

 from dbt.utils import merge, AttrDict, MultiDict, args_to_dict, cast_to_str

-from dbt import selected_resources
-
-import agate
-

 _MISSING = object()
@@ -156,6 +154,7 @@ class BaseDatabaseWrapper:
         self,
         macro_name: str,
         macro_namespace: Optional[str] = None,
+        stack: Optional[MacroStack] = None,
         packages: Optional[List[str]] = None,  # eventually remove since it's fully deprecated
     ) -> MacroGenerator:
         search_packages: List[Optional[str]]
@@ -174,30 +173,40 @@ class BaseDatabaseWrapper:
             raise MacroDispatchArgError(macro_name)

         search_packages = self._get_search_packages(macro_namespace)

-        attempts = []
-        macro = None
+        potential_macros: List[Tuple[Optional[str], str]] = []
+        failed_macros: List[Tuple[Optional[str], str]] = []

         for package_name in search_packages:
-            for prefix in self._get_adapter_macro_prefixes():
-                search_name = f"{prefix}__{macro_name}"
-                try:
-                    # this uses the namespace from the context
-                    macro = self._namespace.get_from_package(package_name, search_name)
-                except CompilationError:
-                    # Only raise CompilationError if macro is not found in
-                    # any package
-                    macro = None
-
-                if package_name is None:
-                    attempts.append(search_name)
-                else:
-                    attempts.append(f"{package_name}.{search_name}")
-
-                if macro is not None:
-                    return macro
+            if macro_name.startswith("materialization_"):
+                potential_macros.append((package_name, macro_name))
+                potential_macros.append(("dbt", macro_name))
+            else:
+                for prefix in self._get_adapter_macro_prefixes():
+                    potential_macros.append((package_name, f"{prefix}__{macro_name}"))
+                    potential_macros.append(
+                        (package_name, f"materialization_{macro_name}_{prefix}")
+                    )
+
+        for package_name, search_name in potential_macros:
+            try:
+                # this uses the namespace from the context
+                macro = self._namespace.get_from_package(package_name, search_name)
+                if macro:
+                    macro.stack = stack
+            except CompilationError:
+                # Only raise CompilationError if macro is not found in
+                # any package
+                pass
+            finally:
+                if macro:
+                    return macro
+                else:
+                    failed_macros.append((package_name, search_name))

-        searched = ", ".join(repr(a) for a in attempts)
-        msg = f"In dispatch: No macro named '{macro_name}' found within namespace: '{macro_namespace}'\n    Searched for: {searched}"
+        msg = (
+            f"In dispatch: No macro named '{macro_name}' found within namespace: '{macro_namespace}'\n"
+            f"Searched for: {failed_macros}"
+        )
         raise CompilationError(msg)
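Note: the rewrite changes dispatch from "probe one name at a time" to "build the full candidate list, then probe", which is what lets materialization macros resolve through adapter dispatch (#8031). A standalone sketch of the candidate-name generation (hypothetical helper mirroring the loop above, not dbt's API):

```python
from typing import List, Optional, Tuple

def candidate_names(
    macro_name: str,
    search_packages: List[Optional[str]],
    prefixes: List[str],
) -> List[Tuple[Optional[str], str]]:
    candidates: List[Tuple[Optional[str], str]] = []
    for package_name in search_packages:
        if macro_name.startswith("materialization_"):
            # materialization macros are also looked up in the dbt namespace
            candidates.append((package_name, macro_name))
            candidates.append(("dbt", macro_name))
        else:
            for prefix in prefixes:
                candidates.append((package_name, f"{prefix}__{macro_name}"))
                candidates.append((package_name, f"materialization_{macro_name}_{prefix}"))
    return candidates

# e.g. a postgres adapter looking up "get_catalog":
print(candidate_names("get_catalog", [None, "dbt"], ["postgres", "default"]))
```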
@@ -865,8 +874,9 @@ class ProviderContext(ManifestContext):
assert self.model.root_path
path = os.path.join(self.model.root_path, self.model.original_file_path)
column_types = self.model.config.column_types
delimiter = self.model.config.delimiter
try:
table = agate_helper.from_csv(path, text_columns=column_types)
table = agate_helper.from_csv(path, text_columns=column_types, delimiter=delimiter)
except ValueError as e:
raise LoadAgateTableValueError(e, node=self.model)
table.original_abspath = os.path.abspath(path)
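The new `delimiter` seed config flows straight into the CSV load above, defaulting to a comma. A minimal standard-library sketch of the same idea (agate_helper's internals are not shown; this wrapper is illustrative only):

import csv
from io import StringIO

def load_seed_rows(text: str, delimiter: str = ","):
    # delimiter defaults to a comma, matching SeedConfig.delimiter
    return list(csv.reader(StringIO(text), delimiter=delimiter))

print(load_seed_rows("id|name\n1|alice", delimiter="|"))  # [['id', 'name'], ['1', 'alice']]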
@@ -225,6 +225,8 @@ class SchemaSourceFile(BaseSourceFile):
sources: List[str] = field(default_factory=list)
exposures: List[str] = field(default_factory=list)
metrics: List[str] = field(default_factory=list)
# metrics generated from semantic_model measures
generated_metrics: List[str] = field(default_factory=list)
groups: List[str] = field(default_factory=list)
# node patches contain models, seeds, snapshots, analyses
ndp: List[str] = field(default_factory=list)
@@ -1331,10 +1331,13 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
self.exposures[exposure.unique_id] = exposure
source_file.exposures.append(exposure.unique_id)

def add_metric(self, source_file: SchemaSourceFile, metric: Metric):
def add_metric(self, source_file: SchemaSourceFile, metric: Metric, generated: bool = False):
_check_duplicates(metric, self.metrics)
self.metrics[metric.unique_id] = metric
source_file.metrics.append(metric.unique_id)
if not generated:
source_file.metrics.append(metric.unique_id)
else:
source_file.generated_metrics.append(metric.unique_id)

def add_group(self, source_file: SchemaSourceFile, group: Group):
_check_duplicates(group, self.groups)
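The `generated` flag keeps measure-generated metrics in a separate per-file list so partial parsing can clean them up independently of user-authored metrics. A toy sketch of the bookkeeping (class and field names here are illustrative):

from dataclasses import dataclass, field
from typing import List

@dataclass
class FileRecord:
    metrics: List[str] = field(default_factory=list)
    generated_metrics: List[str] = field(default_factory=list)

def add_metric(rec: FileRecord, unique_id: str, generated: bool = False) -> None:
    # user-defined and measure-generated metrics are tracked separately
    (rec.generated_metrics if generated else rec.metrics).append(unique_id)

rec = FileRecord()
add_metric(rec, "metric.proj.revenue")
add_metric(rec, "metric.proj.txn_total", generated=True)
print(rec.metrics, rec.generated_metrics)  # ['metric.proj.revenue'] ['metric.proj.txn_total']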
@@ -1422,7 +1425,7 @@ AnyManifest = Union[Manifest, MacroManifest]

@dataclass
@schema_version("manifest", 10)
@schema_version("manifest", 11)
class WritableManifest(ArtifactMixin):
nodes: Mapping[UniqueID, ManifestNode] = field(
metadata=dict(description=("The nodes defined in the dbt project and its dependencies"))
@@ -1486,6 +1489,7 @@ class WritableManifest(ArtifactMixin):
("manifest", 7),
("manifest", 8),
("manifest", 9),
("manifest", 10),
]

@classmethod
@@ -1493,7 +1497,7 @@ class WritableManifest(ArtifactMixin):
"""This overrides the "upgrade_schema_version" call in VersionedSchema (via
ArtifactMixin) to modify the dictionary passed in from earlier versions of the manifest."""
manifest_schema_version = get_manifest_schema_version(data)
if manifest_schema_version <= 9:
if manifest_schema_version <= 10:
data = upgrade_manifest_json(data, manifest_schema_version)
return cls.from_dict(data)
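Bumping the artifact to manifest schema v11 also widens the upgrade gate: any manifest at v10 or older is now routed through the JSON upgrade path before deserialization. A hedged sketch of the pattern (the function names and the exact location of the version key are illustrative, not dbt's real layout):

def upgrade_manifest_json_sketch(data: dict, from_version: int) -> dict:
    # a real implementation would walk per-version upgrade steps here
    data.setdefault("metadata", {})["schema_version"] = 11
    return data

def upgrade_then_load(data: dict, latest: int = 11) -> dict:
    version = data.get("metadata", {}).get("schema_version", latest)
    if version <= latest - 1:
        # anything older than the current schema is upgraded before parsing
        data = upgrade_manifest_json_sketch(data, version)
    return data

print(upgrade_then_load({"metadata": {"schema_version": 10}}))  # upgraded to v11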
@@ -544,6 +544,7 @@ class NodeConfig(NodeAndTestConfig):
@dataclass
class SeedConfig(NodeConfig):
materialized: str = "seed"
delimiter: str = ","
quote_columns: Optional[bool] = None

@classmethod
@@ -619,6 +620,8 @@ class SnapshotConfig(EmptySnapshotConfig):
@classmethod
def validate(cls, data):
super().validate(data)
# Note: currently you can't just set these keys in schema.yml because this validation
# will fail when parsing the snapshot node.
if not data.get("strategy") or not data.get("unique_key") or not data.get("target_schema"):
raise ValidationError(
"Snapshots must be configured with a 'strategy', 'unique_key', "
@@ -649,6 +652,7 @@ class SnapshotConfig(EmptySnapshotConfig):
if data.get("materialized") and data.get("materialized") != "snapshot":
raise ValidationError("A snapshot must have a materialized value of 'snapshot'")

# Called by "calculate_node_config_dict" in ContextConfigGenerator
def finalize_and_validate(self):
data = self.to_dict(omit_none=True)
self.validate(data)
@@ -29,3 +29,11 @@ class ModelNodeArgs:
unique_id = f"{unique_id}.v{self.version}"

return unique_id

@property
def fqn(self) -> List[str]:
fqn = [self.package_name, self.name]
if self.version:
fqn.append(f"v{self.version}")

return fqn
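With the new `fqn` property, externally injected model nodes carry their version in the fully qualified name, e.g. ['my_package', 'my_model', 'v2'] instead of just ['my_package', 'my_model']. A minimal self-contained sketch mirroring the property above:

from dataclasses import dataclass
from typing import List, Optional

@dataclass
class ModelArgsSketch:
    package_name: str
    name: str
    version: Optional[str] = None

    @property
    def fqn(self) -> List[str]:
        # versioned models get a trailing 'v<version>' segment
        fqn = [self.package_name, self.name]
        if self.version:
            fqn.append(f"v{self.version}")
        return fqn

print(ModelArgsSketch("my_package", "my_model", "2").fqn)  # ['my_package', 'my_model', 'v2']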
@@ -50,6 +50,7 @@ from dbt.flags import get_flags
from dbt.node_types import ModelLanguage, NodeType, AccessType
from dbt_semantic_interfaces.call_parameter_sets import FilterCallParameterSets
from dbt_semantic_interfaces.references import (
EntityReference,
MeasureReference,
LinkableElementReference,
SemanticModelReference,
@@ -589,7 +590,7 @@ class ModelNode(CompiledNode):
name=args.name,
package_name=args.package_name,
unique_id=unique_id,
fqn=[args.package_name, args.name],
fqn=args.fqn,
version=args.version,
latest_version=args.latest_version,
relation_name=args.relation_name,
@@ -625,6 +626,18 @@ class ModelNode(CompiledNode):
def materialization_enforces_constraints(self) -> bool:
return self.config.materialized in ["table", "incremental"]

def same_contents(self, old, adapter_type) -> bool:
return super().same_contents(old, adapter_type) and self.same_ref_representation(old)

def same_ref_representation(self, old) -> bool:
return (
# Changing the latest_version may break downstream unpinned refs
self.latest_version == old.latest_version
# Changes to access or deprecation_date may lead to ref-related parsing errors
and self.access == old.access
and self.deprecation_date == old.deprecation_date
)

def build_contract_checksum(self):
# We don't need to construct the checksum if the model does not
# have contract enforced, because it won't be used.
@@ -1498,6 +1511,7 @@ class SemanticModel(GraphNode):
refs: List[RefArgs] = field(default_factory=list)
created_at: float = field(default_factory=lambda: time.time())
config: SemanticModelConfig = field(default_factory=SemanticModelConfig)
primary_entity: Optional[str] = None

@property
def entity_references(self) -> List[LinkableElementReference]:
@@ -1568,17 +1582,26 @@ class SemanticModel(GraphNode):
measure is not None
), f"No measure with name ({measure_reference.element_name}) in semantic_model with name ({self.name})"

if self.defaults is not None:
default_agg_time_dimesion = self.defaults.agg_time_dimension
default_agg_time_dimension = (
self.defaults.agg_time_dimension if self.defaults is not None else None
)

agg_time_dimension_name = measure.agg_time_dimension or default_agg_time_dimesion
agg_time_dimension_name = measure.agg_time_dimension or default_agg_time_dimension
assert agg_time_dimension_name is not None, (
f"Aggregation time dimension for measure {measure.name} is not set! This should either be set directly on "
f"the measure specification in the model, or else defaulted to the primary time dimension in the data "
f"source containing the measure."
f"Aggregation time dimension for measure {measure.name} on semantic model {self.name} is not set! "
"To fix this either specify a default `agg_time_dimension` for the semantic model or define an "
"`agg_time_dimension` on the measure directly."
)
return TimeDimensionReference(element_name=agg_time_dimension_name)

@property
def primary_entity_reference(self) -> Optional[EntityReference]:
return (
EntityReference(element_name=self.primary_entity)
if self.primary_entity is not None
else None
)

# ====================================
# Patches
@@ -220,7 +220,7 @@ class UnparsedModelUpdate(UnparsedNodeUpdate):
versions: Sequence[UnparsedVersion] = field(default_factory=list)
deprecation_date: Optional[datetime.datetime] = None

def __post_init__(self):
def __post_init__(self) -> None:
if self.latest_version:
version_values = [version.v for version in self.versions]
if self.latest_version not in version_values:
@@ -228,7 +228,7 @@ class UnparsedModelUpdate(UnparsedNodeUpdate):
f"latest_version: {self.latest_version} is not one of model '{self.name}' versions: {version_values} "
)

seen_versions: set[str] = set()
seen_versions = set()
for version in self.versions:
if str(version.v) in seen_versions:
raise ParsingError(
@@ -689,7 +689,7 @@ class UnparsedEntity(dbtClassMixin):
class UnparsedNonAdditiveDimension(dbtClassMixin):
name: str
window_choice: str # AggregationType enum
window_groupings: List[str]
window_groupings: List[str] = field(default_factory=list)

@dataclass
@@ -701,6 +701,7 @@ class UnparsedMeasure(dbtClassMixin):
agg_params: Optional[MeasureAggregationParameters] = None
non_additive_dimension: Optional[UnparsedNonAdditiveDimension] = None
agg_time_dimension: Optional[str] = None
create_metric: bool = False

@dataclass
@@ -728,6 +729,7 @@ class UnparsedSemanticModel(dbtClassMixin):
entities: List[UnparsedEntity] = field(default_factory=list)
measures: List[UnparsedMeasure] = field(default_factory=list)
dimensions: List[UnparsedDimension] = field(default_factory=list)
primary_entity: Optional[str] = None

def normalize_date(d: Optional[datetime.date]) -> Optional[datetime.datetime]:
@@ -258,6 +258,13 @@ class ArtifactMixin(VersionedSchema, Writable, Readable):

class Identifier(ValidatedStringMixin):
"""Our definition of a valid Identifier is the same as what's valid for an unquoted database table name.

That is:
1. It can contain a-z, A-Z, 0-9, and _
1. It cannot start with a number
"""

ValidationRegex = r"^[^\d\W]\w*$"

@classmethod
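The `ValidationRegex` above accepts exactly the unquoted-table-name shape the docstring describes: word characters only, with no leading digit. A quick check with the standard `re` module shows which strings pass:

import re

VALIDATION_REGEX = r"^[^\d\W]\w*$"

for candidate in ["my_table", "_private", "2fast", "has-dash"]:
    ok = re.match(VALIDATION_REGEX, candidate) is not None
    print(candidate, ok)  # my_table True, _private True, 2fast False, has-dash False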
@@ -51,19 +51,15 @@ class LocalPinnedPackage(LocalPackageMixin, PinnedPackage):
src_path = self.resolve_path(project)
dest_path = self.get_installation_path(project, renderer)

can_create_symlink = system.supports_symlinks()

if system.path_exists(dest_path):
if not system.path_is_symlink(dest_path):
system.rmdir(dest_path)
else:
system.remove_file(dest_path)

if can_create_symlink:
try:
fire_event(DepsCreatingLocalSymlink())
system.make_symlink(src_path, dest_path)

else:
except OSError:
fire_event(DepsSymlinkNotAvailable())
shutil.copytree(src_path, dest_path)
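The reshaped control flow treats a failed symlink as the signal to copy instead: even when the platform claims symlink support, creation can still raise OSError (unprivileged Windows is the classic case), and the copy fallback now covers that too. A standard-library sketch of the same pattern:

import os
import shutil

def install_local_package(src_path: str, dest_path: str) -> None:
    try:
        # preferred: a cheap symlink into the packages directory
        os.symlink(src_path, dest_path)
    except OSError:
        # fall back to a full copy when symlinking is unavailable or denied
        shutil.copytree(src_path, dest_path)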
@@ -8,12 +8,12 @@ import logging
from logging.handlers import RotatingFileHandler
import threading
import traceback
from typing import Any, Callable, List, Optional, TextIO
from typing import Any, Callable, List, Optional, TextIO, Protocol
from uuid import uuid4
from dbt.events.format import timestamp_to_datetime_string

from dbt.events.base_types import BaseEvent, EventLevel, msg_from_base_event, EventMsg

import dbt.utils

# A Filter is a function which takes a BaseEvent and returns True if the event
# should be logged, False otherwise.
@@ -80,6 +80,7 @@ class LoggerConfig:
use_colors: bool = False
output_stream: Optional[TextIO] = None
output_file_name: Optional[str] = None
output_file_max_bytes: Optional[int] = 10 * 1024 * 1024  # 10 mb
logger: Optional[Any] = None

@@ -100,7 +101,7 @@ class _Logger:
file_handler = RotatingFileHandler(
filename=str(config.output_file_name),
encoding="utf8",
maxBytes=10 * 1024 * 1024,  # 10 mb
maxBytes=config.output_file_max_bytes,  # type: ignore
backupCount=5,
)
self._python_logger = self._get_python_log_for_handler(file_handler)
@@ -175,7 +176,7 @@ class _JsonLogger(_Logger):
from dbt.events.functions import msg_to_dict

msg_dict = msg_to_dict(msg)
raw_log_line = json.dumps(msg_dict, sort_keys=True)
raw_log_line = json.dumps(msg_dict, sort_keys=True, cls=dbt.utils.ForgivingJSONEncoder)
line = self.scrubber(raw_log_line)  # type: ignore
return line

@@ -205,7 +206,7 @@ class EventManager:
for callback in self.callbacks:
callback(msg)

def add_logger(self, config: LoggerConfig):
def add_logger(self, config: LoggerConfig) -> None:
logger = (
_JsonLogger(self, config)
if config.line_format == LineFormat.Json
@@ -217,3 +218,25 @@ class EventManager:
def flush(self):
for logger in self.loggers:
logger.flush()

class IEventManager(Protocol):
callbacks: List[Callable[[EventMsg], None]]
invocation_id: str

def fire_event(self, e: BaseEvent, level: Optional[EventLevel] = None) -> None:
...

def add_logger(self, config: LoggerConfig) -> None:
...

class TestEventManager(IEventManager):
def __init__(self):
self.event_history = []

def fire_event(self, e: BaseEvent, level: Optional[EventLevel] = None) -> None:
self.event_history.append((e, level))

def add_logger(self, config: LoggerConfig) -> None:
raise NotImplementedError()
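`IEventManager` is a structural type: anything exposing the right `callbacks`, `invocation_id`, `fire_event`, and `add_logger` satisfies it, which is what lets `TestEventManager` record events without any logger plumbing. A compact sketch of the same seam with `typing.Protocol` (names here are illustrative):

from typing import List, Protocol

class EventSink(Protocol):
    def fire_event(self, event: str) -> None:
        ...

class RecordingSink:
    # satisfies EventSink structurally; no inheritance needed
    def __init__(self) -> None:
        self.history: List[str] = []

    def fire_event(self, event: str) -> None:
        self.history.append(event)

def run_pipeline(sink: EventSink) -> None:
    sink.fire_event("started")
    sink.fire_event("finished")

sink = RecordingSink()
run_pipeline(sink)
print(sink.history)  # ['started', 'finished']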
@@ -1,8 +1,8 @@
from dbt.constants import METADATA_ENV_PREFIX
from dbt.events.base_types import BaseEvent, EventLevel, EventMsg
from dbt.events.eventmgr import EventManager, LoggerConfig, LineFormat, NoFilter
from dbt.events.eventmgr import EventManager, LoggerConfig, LineFormat, NoFilter, IEventManager
from dbt.events.helpers import env_secrets, scrub_secrets
from dbt.events.types import Formatting, Note
from dbt.events.types import Note
from dbt.flags import get_flags, ENABLE_LEGACY_LOGGER
from dbt.logger import GLOBAL_LOGGER, make_log_dir_if_missing
from functools import partial
@@ -13,6 +13,7 @@ from typing import Callable, Dict, List, Optional, TextIO
import uuid
from google.protobuf.json_format import MessageToDict

import dbt.utils

LOG_VERSION = 3
metadata_vars: Optional[Dict[str, str]] = None
@@ -67,7 +68,11 @@ def setup_event_logger(flags, callbacks: List[Callable[[EventMsg], None]] = [])
log_level_file = EventLevel.DEBUG if flags.DEBUG else EventLevel(flags.LOG_LEVEL_FILE)
EVENT_MANAGER.add_logger(
_get_logfile_config(
log_file, flags.USE_COLORS_FILE, log_file_format, log_level_file
log_file,
flags.USE_COLORS_FILE,
log_file_format,
log_level_file,
flags.LOG_FILE_MAX_BYTES,
)
)

@@ -110,13 +115,15 @@ def _stdout_filter(
line_format: LineFormat,
msg: EventMsg,
) -> bool:
return (msg.info.name not in ["CacheAction", "CacheDumpGraph"] or log_cache_events) and not (
line_format == LineFormat.Json and type(msg.data) == Formatting
)
return msg.info.name not in ["CacheAction", "CacheDumpGraph"] or log_cache_events

def _get_logfile_config(
log_path: str, use_colors: bool, line_format: LineFormat, level: EventLevel
log_path: str,
use_colors: bool,
line_format: LineFormat,
level: EventLevel,
log_file_max_bytes: int,
) -> LoggerConfig:
return LoggerConfig(
name="file_log",
@@ -126,14 +133,13 @@
scrubber=env_scrubber,
filter=partial(_logfile_filter, bool(get_flags().LOG_CACHE_EVENTS), line_format),
output_file_name=log_path,
output_file_max_bytes=log_file_max_bytes,
)

def _logfile_filter(log_cache_events: bool, line_format: LineFormat, msg: EventMsg) -> bool:
return (
msg.info.code not in nofile_codes
and not (msg.info.name in ["CacheAction", "CacheDumpGraph"] and not log_cache_events)
and not (line_format == LineFormat.Json and type(msg.data) == Formatting)
return msg.info.code not in nofile_codes and not (
msg.info.name in ["CacheAction", "CacheDumpGraph"] and not log_cache_events
)

@@ -172,7 +178,7 @@ def cleanup_event_logger():
# Since dbt-rpc does not do its own log setup, and since some events can
# currently fire before logs can be configured by setup_event_logger(), we
# create a default configuration with default settings and no file output.
EVENT_MANAGER: EventManager = EventManager()
EVENT_MANAGER: IEventManager = EventManager()
EVENT_MANAGER.add_logger(
_get_logbook_log_config(False, True, False, False)  # type: ignore
if ENABLE_LEGACY_LOGGER
@@ -200,7 +206,7 @@ def stop_capture_stdout_logs():
# the message may contain secrets which must be scrubbed at the usage site.
def msg_to_json(msg: EventMsg) -> str:
msg_dict = msg_to_dict(msg)
raw_log_line = json.dumps(msg_dict, sort_keys=True)
raw_log_line = json.dumps(msg_dict, sort_keys=True, cls=dbt.utils.ForgivingJSONEncoder)
return raw_log_line

@@ -263,7 +269,7 @@ def fire_event(e: BaseEvent, level: Optional[EventLevel] = None) -> None:

def get_metadata_vars() -> Dict[str, str]:
global metadata_vars
if metadata_vars is None:
if not metadata_vars:
metadata_vars = {
k[len(METADATA_ENV_PREFIX) :]: v
for k, v in os.environ.items()
@@ -285,3 +291,8 @@ def set_invocation_id() -> None:
# This is primarily for setting the invocation_id for separate
# commands in the dbt servers. It shouldn't be necessary for the CLI.
EVENT_MANAGER.invocation_id = str(uuid.uuid4())

def ctx_set_event_manager(event_manager: IEventManager):
global EVENT_MANAGER
EVENT_MANAGER = event_manager
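Switching the guard in `get_metadata_vars` from `is None` to a falsy check is what enables re-population during programmatic invocations: an empty cache, not just an uninitialized one, now triggers a fresh read of the environment, so `DBT_ENV_CUSTOM_ENV_*` variables changed between invocations are picked up. A runnable sketch of the difference (module-level cache name is illustrative):

import os

METADATA_ENV_PREFIX = "DBT_ENV_CUSTOM_ENV_"
_cache = None

def get_metadata_vars_sketch():
    global _cache
    # falsy check: both None and {} trigger a re-read, so environment changes
    # made between programmatic invocations are picked up again
    if not _cache:
        _cache = {
            k[len(METADATA_ENV_PREFIX):]: v
            for k, v in os.environ.items()
            if k.startswith(METADATA_ENV_PREFIX)
        }
    return _cache

os.environ[METADATA_ENV_PREFIX + "RUN_ID"] = "42"
print(get_metadata_vars_sketch())  # {'RUN_ID': '42'}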
@@ -2245,25 +2245,7 @@ message CheckNodeTestFailureMsg {
CheckNodeTestFailure data = 2;
}

// Z028
message FirstRunResultError {
string msg = 1;
}

message FirstRunResultErrorMsg {
EventInfo info = 1;
FirstRunResultError data = 2;
}

// Z029
message AfterFirstRunResultError {
string msg = 1;
}

message AfterFirstRunResultErrorMsg {
EventInfo info = 1;
AfterFirstRunResultError data = 2;
}
// Skipped Z028, Z029

// Z030
message EndOfRunSummary {
@@ -2171,25 +2171,7 @@ class CheckNodeTestFailure(InfoLevel):
return f" See test failures:\n {border}\n {msg}\n {border}"

# FirstRunResultError and AfterFirstRunResultError are just splitting the message from the result
# object into multiple log lines
# TODO: is this reallly needed?  See printer.py

class FirstRunResultError(ErrorLevel):
def code(self):
return "Z028"

def message(self) -> str:
return yellow(self.msg)

class AfterFirstRunResultError(ErrorLevel):
def code(self):
return "Z029"

def message(self) -> str:
return self.msg
# Skipped Z028, Z029

class EndOfRunSummary(InfoLevel):
@@ -486,7 +486,7 @@ class InvalidConnectionError(DbtRuntimeError):
self.thread_id = thread_id
self.known = known
super().__init__(
msg="connection never acquired for thread {self.thread_id}, have {self.known}"
msg=f"connection never acquired for thread {self.thread_id}, have {self.known}"
)
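The one-character fix above matters: without the `f` prefix, `{self.thread_id}` is emitted literally instead of being interpolated. A two-line demonstration:

thread_id, known = "main", ["conn-1"]
print("connection never acquired for thread {thread_id}, have {known}")   # braces printed verbatim
print(f"connection never acquired for thread {thread_id}, have {known}")  # values interpolated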
@@ -103,7 +103,7 @@ SelectorTarget = Union[SourceDefinition, ManifestNode, Exposure, Metric]
class SelectorMethod(metaclass=abc.ABCMeta):
def __init__(
self, manifest: Manifest, previous_state: Optional[PreviousState], arguments: List[str]
):
) -> None:
self.manifest: Manifest = manifest
self.previous_state = previous_state
self.arguments: List[str] = arguments
@@ -467,7 +467,7 @@ class TestTypeSelectorMethod(SelectorMethod):

class StateSelectorMethod(SelectorMethod):
def __init__(self, *args, **kwargs):
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.modified_macros: Optional[List[str]] = None
@@ -63,3 +63,12 @@
{{ exceptions.raise_not_implemented(
'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}
{% endmacro %}

{% macro get_relations() %}
{{ return(adapter.dispatch('get_relations', 'dbt')()) }}
{% endmacro %}

{% macro default__get_relations() %}
{{ exceptions.raise_not_implemented(
'get_relations macro not implemented for adapter '+adapter.type()) }}
{% endmacro %}
@@ -19,7 +19,7 @@
{% set day_count = (end_date - start_date).days %}
{% if day_count < 0 %}
{% set msg -%}
Partiton start date is after the end date ({{ start_date }}, {{ end_date }})
Partition start date is after the end date ({{ start_date }}, {{ end_date }})
{%- endset %}

{{ exceptions.raise_compiler_error(msg, model) }}
@@ -33,7 +33,12 @@

-- cleanup
{% if existing_relation is not none %}
{{ adapter.rename_relation(existing_relation, backup_relation) }}
/* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped
since the variable was first set. */
{% set existing_relation = load_cached_relation(existing_relation) %}
{% if existing_relation is not none %}
{{ adapter.rename_relation(existing_relation, backup_relation) }}
{% endif %}
{% endif %}

{{ adapter.rename_relation(intermediate_relation, target_relation) }}
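The extra `load_cached_relation` lookup in both materializations guards a time-of-check/time-of-use gap: the relation bound at the top of the materialization may have been dropped (for example by a concurrent run) before the cleanup rename executes. The same re-validate-then-act shape in Python, with a dict standing in for the relation cache (names are illustrative):

from typing import Dict, Optional

def rename_if_exists(cache: Dict[str, str], name: str, backup_name: str) -> None:
    # re-check the live cache instead of trusting a variable bound earlier
    current: Optional[str] = cache.get(name)
    if current is not None:
        cache[backup_name] = cache.pop(name)

cache = {"my_table": "..."}
rename_if_exists(cache, "my_table", "my_table__backup")
rename_if_exists(cache, "my_table", "my_table__backup")  # second call is a safe no-op
print(cache)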
@@ -45,7 +45,12 @@
-- cleanup
-- move the existing view out of the way
{% if existing_relation is not none %}
{{ adapter.rename_relation(existing_relation, backup_relation) }}
/* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped
since the variable was first set. */
{% set existing_relation = load_cached_relation(existing_relation) %}
{% if existing_relation is not none %}
{{ adapter.rename_relation(existing_relation, backup_relation) }}
{% endif %}
{% endif %}
{{ adapter.rename_relation(intermediate_relation, target_relation) }}
File diff suppressed because one or more lines are too long
@@ -4,8 +4,8 @@ from dbt.dataclass_schema import StrEnum

class AccessType(StrEnum):
Protected = "protected"
Private = "private"
Protected = "protected"
Public = "public"

@classmethod
@@ -102,8 +102,7 @@ class RelationUpdate:
self.package_updaters = package_updaters
self.component = component

def __call__(self, parsed_node: Any, config_dict: Dict[str, Any]) -> None:
override = config_dict.get(self.component)
def __call__(self, parsed_node: Any, override: Optional[str]) -> None:
if parsed_node.package_name in self.package_updaters:
new_value = self.package_updaters[parsed_node.package_name](override, parsed_node)
else:
@@ -280,9 +279,19 @@ class ConfiguredParser(
def update_parsed_node_relation_names(
self, parsed_node: IntermediateNode, config_dict: Dict[str, Any]
) -> None:
self._update_node_database(parsed_node, config_dict)
self._update_node_schema(parsed_node, config_dict)
self._update_node_alias(parsed_node, config_dict)

# These call the RelationUpdate callable to go through generate_name macros
self._update_node_database(parsed_node, config_dict.get("database"))
self._update_node_schema(parsed_node, config_dict.get("schema"))
self._update_node_alias(parsed_node, config_dict.get("alias"))

# Snapshot nodes use special "target_database" and "target_schema" fields for some reason
if parsed_node.resource_type == NodeType.Snapshot:
if "target_database" in config_dict and config_dict["target_database"]:
parsed_node.database = config_dict["target_database"]
if "target_schema" in config_dict and config_dict["target_schema"]:
parsed_node.schema = config_dict["target_schema"]

self._update_node_relation_name(parsed_node)

def update_parsed_node_config(
@@ -349,7 +358,7 @@ class ConfiguredParser(
# do this once before we parse the node database/schema/alias, so
# parsed_node.config is what it would be if they did nothing
self.update_parsed_node_config_dict(parsed_node, config_dict)
# This updates the node database/schema/alias
# This updates the node database/schema/alias/relation_name
self.update_parsed_node_relation_names(parsed_node, config_dict)

# tests don't have hooks
@@ -177,10 +177,10 @@ class GenericTestBlock(TestBlock[Testable], Generic[Testable]):
class ParserRef:
"""A helper object to hold parse-time references."""

def __init__(self):
def __init__(self) -> None:
self.column_info: Dict[str, ColumnInfo] = {}

def _add(self, column: HasColumnProps):
def _add(self, column: HasColumnProps) -> None:
tags: List[str] = []
tags.extend(getattr(column, "tags", ()))
quote: Optional[bool]
@@ -81,7 +81,7 @@ class MacroParser(BaseParser[Macro]):
name: str = macro.name.replace(MACRO_PREFIX, "")
node = self.parse_macro(block, base_node, name)
# get supported_languages for materialization macro
if "materialization" in name:
if block.block_type_name == "materialization":
node.supported_languages = jinja.get_supported_languages(macro)
yield node
@@ -79,6 +79,7 @@ from dbt.parser.read_files import (
load_source_file,
FileDiff,
ReadFilesFromDiff,
ReadFiles,
)
from dbt.parser.partial import PartialParsing, special_override_macros
from dbt.contracts.graph.manifest import (
@@ -122,7 +123,7 @@ from dbt.parser.sources import SourcePatcher
from dbt.version import __version__

from dbt.dataclass_schema import StrEnum, dbtClassMixin
from dbt.plugins import get_plugin_manager
from dbt import plugins

from dbt_semantic_interfaces.enum_extension import assert_values_exhausted
from dbt_semantic_interfaces.type_enums import MetricType
@@ -259,7 +260,7 @@ class ManifestLoader:
# We need to know if we're actually partially parsing. It could
# have been enabled, but not happening because of some issue.
self.partially_parsing = False
self.partial_parser = None
self.partial_parser: Optional[PartialParsing] = None

# This is a saved manifest from a previous run that's used for partial parsing
self.saved_manifest: Optional[Manifest] = self.read_manifest_for_partial_parse()
@@ -284,8 +285,17 @@ class ManifestLoader:
adapter.clear_macro_manifest()
macro_hook = adapter.connections.set_query_header

flags = get_flags()
if not flags.PARTIAL_PARSE_FILE_DIFF:
file_diff = FileDiff.from_dict(
{
"deleted": [],
"changed": [],
"added": [],
}
)
# Hack to test file_diffs
if os.environ.get("DBT_PP_FILE_DIFF_TEST"):
elif os.environ.get("DBT_PP_FILE_DIFF_TEST"):
file_diff_path = "file_diff.json"
if path_exists(file_diff_path):
file_diff_dct = read_json(file_diff_path)
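With `PARTIAL_PARSE_FILE_DIFF` disabled, the loader substitutes an empty diff so the file-diff code path can be bypassed deterministically rather than left unset. A hedged sketch of that shape (the dataclass below is illustrative, not dbt's FileDiff definition):

from dataclasses import dataclass, field
from typing import List

@dataclass
class FileDiffSketch:
    deleted: List[str] = field(default_factory=list)
    changed: List[str] = field(default_factory=list)
    added: List[str] = field(default_factory=list)

    @classmethod
    def from_dict(cls, d: dict) -> "FileDiffSketch":
        return cls(d.get("deleted", []), d.get("changed", []), d.get("added", []))

empty = FileDiffSketch.from_dict({"deleted": [], "changed": [], "added": []})
print(empty)  # an empty diff: nothing deleted, changed, or added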
@@ -322,7 +332,7 @@ class ManifestLoader:
return manifest

# This is where the main action happens
def load(self):
def load(self) -> Manifest:
start_read_files = time.perf_counter()

# This updates the "files" dictionary in self.manifest, and creates
@@ -331,6 +341,7 @@ class ManifestLoader:
# of parsers to lists of file strings. The file strings are
# used to get the SourceFiles from the manifest files.
saved_files = self.saved_manifest.files if self.saved_manifest else {}
file_reader: Optional[ReadFiles] = None
if self.file_diff:
# We're getting files from a file diff
file_reader = ReadFilesFromDiff(
@@ -394,7 +405,7 @@ class ManifestLoader:
}

# get file info for local logs
parse_file_type = None
parse_file_type: str = ""
file_id = self.partial_parser.processing_file
if file_id:
source_file = None
@@ -475,7 +486,7 @@ class ManifestLoader:
self.manifest.rebuild_disabled_lookup()

# Load yaml files
parser_types = [SchemaParser]
parser_types = [SchemaParser]  # type: ignore
for project in self.all_projects.values():
if project.project_name not in project_parser_files:
continue
@@ -503,6 +514,7 @@ class ManifestLoader:
self.manifest.selectors = self.root_project.manifest_selectors

# inject any available external nodes
self.manifest.build_parent_and_child_maps()
external_nodes_modified = self.inject_external_nodes()
if external_nodes_modified:
self.manifest.rebuild_ref_lookup()
@@ -751,7 +763,7 @@ class ManifestLoader:
manifest_nodes_modified = True

# Inject any newly-available external nodes
pm = get_plugin_manager(self.root_project.project_name)
pm = plugins.get_plugin_manager(self.root_project.project_name)
plugin_model_nodes = pm.get_nodes().models
for node_arg in plugin_model_nodes.values():
node = ModelNode.from_args(node_arg)
@@ -1052,7 +1064,7 @@ class ManifestLoader:

# Takes references in 'refs' array of nodes and exposures, finds the target
# node, and updates 'depends_on.nodes' with the unique id
def process_refs(self, current_project: str, dependencies: Optional[Dict[str, Project]]):
def process_refs(self, current_project: str, dependencies: Optional[Mapping[str, Project]]):
for node in self.manifest.nodes.values():
if node.created_at < self.started_at:
continue
@@ -497,12 +497,10 @@ class ModelParser(SimpleSQLParser[ModelNode]):
# set refs and sources on the node object
refs: List[RefArgs] = []
for ref in statically_parsed["refs"]:
if len(ref) == 1:
package, name = None, ref[0]
else:
package, name = ref

refs.append(RefArgs(package=package, name=name))
name = ref.get("name")
package = ref.get("package")
version = ref.get("version")
refs.append(RefArgs(name, package, version))

node.refs += refs
node.sources += statically_parsed["sources"]
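The static parser's `refs` payload changed shape in this hunk: each entry is now a dict carrying `name`, `package`, and `version` rather than a one- or two-element list, which is what lets a versioned `ref` survive static parsing. A before/after sketch of consuming both shapes (plain tuples stand in for RefArgs):

def parse_ref_old(ref: list):
    # old: ['name'] or ['package', 'name']; no room for a version
    if len(ref) == 1:
        return None, ref[0], None
    package, name = ref
    return package, name, None

def parse_ref_new(ref: dict):
    # new: {'name': ..., 'package': ..., 'version': ...}
    return ref.get("package"), ref.get("name"), ref.get("version")

print(parse_ref_old(["my_model"]))                       # (None, 'my_model', None)
print(parse_ref_new({"name": "my_model", "version": 2}))  # (None, 'my_model', 2)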
@@ -895,6 +895,14 @@ class PartialParsing:
elif unique_id in self.saved_manifest.disabled:
self.delete_disabled(unique_id, schema_file.file_id)

metrics = schema_file.generated_metrics.copy()
for unique_id in metrics:
if unique_id in self.saved_manifest.metrics:
self.saved_manifest.metrics.pop(unique_id)
schema_file.generated_metrics.remove(unique_id)
elif unique_id in self.saved_manifest.disabled:
self.delete_disabled(unique_id, schema_file.file_id)

def get_schema_element(self, elem_list, elem_name):
for element in elem_list:
if "name" in element and element["name"] == elem_name:
@@ -16,9 +16,10 @@ from dbt.dataclass_schema import dbtClassMixin
from dbt.parser.schemas import yaml_from_file, schema_file_keys
from dbt.exceptions import ParsingError
from dbt.parser.search import filesystem_search
from typing import Optional, Dict, List, Mapping
from typing import Optional, Dict, List, Mapping, MutableMapping
from dbt.events.types import InputFileDiffError
from dbt.events.functions import fire_event
from typing import Protocol

@dataclass
@@ -173,12 +174,21 @@ def generate_dbt_ignore_spec(project_root):
return ignore_spec

# Protocol for the ReadFiles... classes
class ReadFiles(Protocol):
files: MutableMapping[str, AnySourceFile]
project_parser_files: Dict

def read_files(self):
pass

@dataclass
class ReadFilesFromFileSystem:
all_projects: Mapping[str, Project]
files: Dict[str, AnySourceFile] = field(default_factory=dict)
files: MutableMapping[str, AnySourceFile] = field(default_factory=dict)
# saved_files is only used to compare schema files
saved_files: Dict[str, AnySourceFile] = field(default_factory=dict)
saved_files: MutableMapping[str, AnySourceFile] = field(default_factory=dict)
# project_parser_files = {
#   "my_project": {
#     "ModelParser": ["my_project://models/my_model.sql"]
@@ -212,10 +222,10 @@ class ReadFilesFromDiff:
root_project_name: str
all_projects: Mapping[str, Project]
file_diff: FileDiff
files: Dict[str, AnySourceFile] = field(default_factory=dict)
files: MutableMapping[str, AnySourceFile] = field(default_factory=dict)
# saved_files is used to construct a fresh copy of files, without
# additional information from parsing
saved_files: Dict[str, AnySourceFile] = field(default_factory=dict)
saved_files: MutableMapping[str, AnySourceFile] = field(default_factory=dict)
project_parser_files: Dict = field(default_factory=dict)
project_file_types: Dict = field(default_factory=dict)
local_package_dirs: Optional[List[str]] = None
@@ -303,7 +303,7 @@ class MetricParser(YamlReader):
# input_measures=?,
)

def parse_metric(self, unparsed: UnparsedMetric):
def parse_metric(self, unparsed: UnparsedMetric, generated: bool = False):
package_name = self.project.project_name
unique_id = f"{NodeType.Metric}.{package_name}.{unparsed.name}"
path = self.yaml.path.relative_path
@@ -358,7 +358,7 @@ class MetricParser(YamlReader):

# if the metric is disabled we do not want it included in the manifest, only in the disabled dict
if parsed.config.enabled:
self.manifest.add_metric(self.yaml.file, parsed)
self.manifest.add_metric(self.yaml.file, parsed, generated)
else:
self.manifest.add_disabled(self.yaml.file, parsed)

@@ -509,6 +509,19 @@ class SemanticModelParser(YamlReader):
)
return measures

def _create_metric(self, measure: UnparsedMeasure, enabled: bool) -> None:
unparsed_metric = UnparsedMetric(
name=measure.name,
label=measure.name,
type="simple",
type_params=UnparsedMetricTypeParams(measure=measure.name, expr=measure.name),
description=measure.description or f"Metric created from measure {measure.name}",
config={"enabled": enabled},
)

parser = MetricParser(self.schema_parser, yaml=self.yaml)
parser.parse_metric(unparsed=unparsed_metric, generated=True)

def parse_semantic_model(self, unparsed: UnparsedSemanticModel):
package_name = self.project.project_name
unique_id = f"{NodeType.SemanticModel}.{package_name}.{unparsed.name}"
@@ -532,6 +545,7 @@ class SemanticModelParser(YamlReader):
measures=self._get_measures(unparsed.measures),
dimensions=self._get_dimensions(unparsed.dimensions),
defaults=unparsed.defaults,
primary_entity=unparsed.primary_entity,
)

ctx = generate_parse_semantic_models(
@@ -549,6 +563,11 @@ class SemanticModelParser(YamlReader):
# No ability to disable a semantic model at this time
self.manifest.add_semantic_model(self.yaml.file, parsed)

# Create a metric for each measure with `create_metric = True`
for measure in unparsed.measures:
if measure.create_metric is True:
self._create_metric(measure=measure, enabled=parsed.config.enabled)

def parse(self):
for data in self.get_key_dicts():
try:
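Putting the `create_metric` pieces together: after a semantic model parses, each measure flagged `create_metric: true` is fed back through the metric parser as a generated simple metric that inherits the model's enabled state. A toy end-to-end sketch (the types and dict shape below are illustrative):

from dataclasses import dataclass
from typing import List

@dataclass
class MeasureSketch:
    name: str
    create_metric: bool = False

def generate_metrics(measures: List[MeasureSketch], enabled: bool) -> List[dict]:
    generated = []
    for measure in measures:
        if measure.create_metric:
            # one simple metric per flagged measure, named after the measure
            generated.append(
                {
                    "name": measure.name,
                    "type": "simple",
                    "measure": measure.name,
                    "config": {"enabled": enabled},
                }
            )
    return generated

measures = [MeasureSketch("revenue", create_metric=True), MeasureSketch("cost")]
print(generate_metrics(measures, enabled=True))  # only 'revenue' becomes a metric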
@@ -6,6 +6,7 @@ from dbt.contracts.graph.manifest import Manifest
from dbt.exceptions import DbtRuntimeError
from dbt.plugins.contracts import PluginArtifacts
from dbt.plugins.manifest import PluginNodes
import dbt.tracking

def dbt_hook(func):
@@ -29,8 +30,11 @@ class dbtPlugin:
self.project_name = project_name
try:
self.initialize()
except DbtRuntimeError as e:
# Remove the first line of DbtRuntimeError to avoid redundant "Runtime Error" line
raise DbtRuntimeError("\n".join(str(e).split("\n")[1:]))
except Exception as e:
raise DbtRuntimeError(f"initialize: {e}")
raise DbtRuntimeError(str(e))

@property
def name(self) -> str:
@@ -116,5 +120,14 @@ class PluginManager:
all_plugin_nodes = PluginNodes()
for hook_method in self.hooks.get("get_nodes", []):
plugin_nodes = hook_method()
dbt.tracking.track_plugin_get_nodes(
{
"plugin_name": hook_method.__self__.name,  # type: ignore
"num_model_nodes": len(plugin_nodes.models),
"num_model_packages": len(
{model.package_name for model in plugin_nodes.models.values()}
),
}
)
all_plugin_nodes.update(plugin_nodes)
return all_plugin_nodes
@@ -107,7 +107,7 @@ class VersionSpecifier(VersionSpecification):
def __str__(self):
return self.to_version_string()

def to_range(self):
def to_range(self) -> "VersionRange":
range_start: VersionSpecifier = UnboundedVersionSpecifier()
range_end: VersionSpecifier = UnboundedVersionSpecifier()
@@ -2,7 +2,6 @@ import threading
from typing import AbstractSet, Any, List, Iterable, Set

from dbt.adapters.base import BaseRelation
from dbt.clients.jinja import MacroGenerator
from dbt.context.providers import generate_runtime_model_context
from dbt.contracts.results import RunStatus, RunResult
from dbt.dataclass_schema import dbtClassMixin
@@ -80,7 +79,9 @@ class CloneRunner(BaseRunner):

hook_ctx = self.adapter.pre_model_hook(context_config)
try:
result = MacroGenerator(materialization_macro, context)()
result = context["adapter"].dispatch(
materialization_macro.name, stack=context["context_macro_stack"]
)()
finally:
self.adapter.post_model_hook(context_config, hook_ctx)
@@ -139,6 +139,7 @@ class CompileTask(GraphRunnableTask):
"node_path": "sql/inline_query",
"node_name": "inline_query",
"unique_id": "sqloperation.test.inline_query",
"node_status": "failed",
},
)
)
@@ -74,7 +74,7 @@ class DebugRunStatus(Flag):

class DebugTask(BaseTask):
def __init__(self, args, config):
def __init__(self, args, config) -> None:
super().__init__(args, config)
self.profiles_dir = args.PROFILES_DIR
self.profile_path = os.path.join(self.profiles_dir, "profiles.yml")
@@ -149,13 +149,14 @@ class DebugTask(BaseTask):
dependencies_statuses = self.test_dependencies()

# Test connection
self.test_connection()
connection_status = self.test_connection()

# Log messages from any fails
all_statuses: List[SubtaskStatus] = [
load_profile_status,
load_project_status,
*dependencies_statuses,
connection_status,
]
all_failing_statuses: List[SubtaskStatus] = list(
filter(lambda status: status.run_status == RunStatus.Error, all_statuses)
@@ -15,6 +15,7 @@ from dbt.events.types import (
ListCmdOut,
)
from dbt.exceptions import DbtRuntimeError, DbtInternalError
from dbt.events.contextvars import task_contextvars

class ListTask(GraphRunnableTask):
@@ -123,20 +124,23 @@ class ListTask(GraphRunnableTask):
yield node.original_file_path

def run(self):
self.compile_manifest()
output = self.args.output
if output == "selector":
generator = self.generate_selectors
elif output == "name":
generator = self.generate_names
elif output == "json":
generator = self.generate_json
elif output == "path":
generator = self.generate_paths
else:
raise DbtInternalError("Invalid output {}".format(output))
# We set up a context manager here with "task_contextvars" because we
# we need the project_root in compile_manifest.
with task_contextvars(project_root=self.config.project_root):
self.compile_manifest()
output = self.args.output
if output == "selector":
generator = self.generate_selectors
elif output == "name":
generator = self.generate_names
elif output == "json":
generator = self.generate_json
elif output == "path":
generator = self.generate_paths
else:
raise DbtInternalError("Invalid output {}".format(output))

return self.output_results(generator())
return self.output_results(generator())

def output_results(self, results):
"""Log, or output a plain, newline-delimited, and ready-to-pipe list of nodes found."""
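Wrapping the body of `run` in `task_contextvars` scopes values like `project_root` to this task, so code called underneath (here, `compile_manifest`) can read them without threading parameters through every call. A small standard-library sketch of that pattern (the real `task_contextvars` helper may differ in shape):

from contextlib import contextmanager
from contextvars import ContextVar

cv_project_root: ContextVar[str] = ContextVar("project_root", default="")

@contextmanager
def task_contextvars_sketch(project_root: str):
    token = cv_project_root.set(project_root)
    try:
        yield
    finally:
        cv_project_root.reset(token)  # restore the previous value on exit

def compile_manifest_sketch() -> None:
    # deep inside the task, no parameter needed
    print("compiling in", cv_project_root.get())

with task_contextvars_sketch("/projects/jaffle_shop"):
    compile_manifest_sketch()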
@@ -14,8 +14,6 @@ from dbt.events.types import (
RunResultErrorNoMessage,
SQLCompiledPath,
CheckNodeTestFailure,
FirstRunResultError,
AfterFirstRunResultError,
EndOfRunSummary,
)

@@ -118,15 +116,7 @@ def print_run_result_error(result, newline: bool = True, is_warning: bool = Fals
fire_event(CheckNodeTestFailure(relation_name=result.node.relation_name))

elif result.message is not None:
first = True
for line in result.message.split("\n"):
# TODO: why do we format like this? Is there a reason this needs to
# be split instead of sending it as a single log line?
if first:
fire_event(FirstRunResultError(msg=line))
first = False
else:
fire_event(AfterFirstRunResultError(msg=line))
fire_event(RunResultError(msg=result.message))

def print_run_end_messages(results, keyboard_interrupt: bool = False) -> None:
Some files were not shown because too many files have changed in this diff.