Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-19 04:21:28 +00:00

Compare commits: codecov_ma...remove-ssl (75 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 59542f37e0 | |
| | 6234267242 | |
| | 1afbb87e99 | |
| | d18a74ddb7 | |
| | 4d3c6d9c7c | |
| | 10f9724827 | |
| | 582faa129e | |
| | 4ec87a01e0 | |
| | ff98685dd6 | |
| | 424f3d218a | |
| | 661623f9f7 | |
| | 49397b4d7b | |
| | 0553fd817c | |
| | 7ad971f720 | |
| | f485c13035 | |
| | c30b691164 | |
| | d088d4493e | |
| | 770f804325 | |
| | 37a29073de | |
| | 17cd145f09 | |
| | ac539fd5cf | |
| | 048553ddc3 | |
| | dfe6b71fd9 | |
| | 18ee93ca3a | |
| | cb4bc2d6e9 | |
| | b0451806ef | |
| | b514e4c249 | |
| | 8350dfead3 | |
| | 34e6edbb13 | |
| | 27be92903e | |
| | 9388030182 | |
| | b7aee3f5a4 | |
| | 83ff38ab24 | |
| | 6603a44151 | |
| | e69d4e7f14 | |
| | 506f65e880 | |
| | 41bb52762b | |
| | 8c98ef3e70 | |
| | 44d1e73b4f | |
| | 53794fbaba | |
| | 556b4043e9 | |
| | 424c636533 | |
| | f63709260e | |
| | 991618dfc1 | |
| | 1af489b1cd | |
| | a433c31d6e | |
| | 5814928e38 | |
| | 6130a6e1d0 | |
| | 7872f6a670 | |
| | f230e418aa | |
| | 518eb73f88 | |
| | 5b6d21d7da | |
| | 410506f448 | |
| | 3cb44d37c0 | |
| | f977ed7471 | |
| | 3f5617b569 | |
| | fe9c875d32 | |
| | 23b16ad6d2 | |
| | fdeccfaf24 | |
| | fecde23da5 | |
| | b1d931337e | |
| | 39542336b8 | |
| | 799588cada | |
| | f392add4b8 | |
| | 49560bf2a2 | |
| | 44b3ed5ae9 | |
| | 6235145641 | |
| | ff5cb7ba51 | |
| | 1e2b9ae962 | |
| | 8cab58d248 | |
| | 0d645c227f | |
| | fb6c349677 | |
| | eeb057085c | |
| | 121371f4a4 | |
| | a32713198b | |
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.7.0a1
+current_version = 1.7.0b1
 parse = (?P<major>[\d]+) # major version number
     \.(?P<minor>[\d]+) # minor version number
     \.(?P<patch>[\d]+) # patch version number
70  .changes/1.7.0-b1.md  Normal file
@@ -0,0 +1,70 @@
## dbt-core 1.7.0-b1 - August 17, 2023

### Breaking Changes

- Removed the FirstRunResultError and AfterFirstRunResultError event types, using the existing RunResultError in their place. ([#7963](https://github.com/dbt-labs/dbt-core/issues/7963))

### Features

- Enable re-population of metadata vars post-environment change during programmatic invocation ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
- Added support to configure a delimiter for a seed file, defaults to comma ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
- Allow specification of `create_metric: true` on measures ([#8125](https://github.com/dbt-labs/dbt-core/issues/8125))

### Fixes

- Copy dir during `dbt deps` if symlink fails ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
- Fixed double-underline ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
- Copy target_schema from config into snapshot node ([#6745](https://github.com/dbt-labs/dbt-core/issues/6745))
- Enable converting deprecation warnings to errors ([#8130](https://github.com/dbt-labs/dbt-core/issues/8130))
- Add status to Parse Inline Error ([#8173](https://github.com/dbt-labs/dbt-core/issues/8173))
- Ensure `warn_error_options` get serialized in `invocation_args_dict` ([#7694](https://github.com/dbt-labs/dbt-core/issues/7694))
- Stop detecting materialization macros based on macro name ([#6231](https://github.com/dbt-labs/dbt-core/issues/6231))
- Update `dbt deps` download retry logic to handle `EOFError` exceptions ([#6653](https://github.com/dbt-labs/dbt-core/issues/6653))
- Improve handling of CTE injection with ephemeral models ([#8213](https://github.com/dbt-labs/dbt-core/issues/8213))
- Fix unbound local variable error in `checked_agg_time_dimension_for_measure` ([#8230](https://github.com/dbt-labs/dbt-core/issues/8230))
- Ensure runtime errors are raised for graph runnable tasks (compile, show, run, etc) ([#8166](https://github.com/dbt-labs/dbt-core/issues/8166))
- Fix retry not working with log-file-max-bytes ([#8297](https://github.com/dbt-labs/dbt-core/issues/8297))
- Detect changes to model access, version, or latest_version in state:modified ([#8189](https://github.com/dbt-labs/dbt-core/issues/8189))
- Add connection status into list of statuses for dbt debug ([#8350](https://github.com/dbt-labs/dbt-core/issues/8350))
- fix fqn-selection for external versioned models ([#8374](https://github.com/dbt-labs/dbt-core/issues/8374))
- Fix: DbtInternalError after model that previously ref'd external model is deleted ([#8375](https://github.com/dbt-labs/dbt-core/issues/8375))
- Fix using list command with path selector and project-dir ([#8385](https://github.com/dbt-labs/dbt-core/issues/8385))
- Remedy performance regression by only writing run_results.json once. ([#8360](https://github.com/dbt-labs/dbt-core/issues/8360))

### Docs

- Corrected spelling of "Partiton" ([dbt-docs/#8100](https://github.com/dbt-labs/dbt-docs/issues/8100))
- Remove static SQL codeblock for metrics ([dbt-docs/#436](https://github.com/dbt-labs/dbt-docs/issues/436))
- fixed comment util.py ([dbt-docs/#None](https://github.com/dbt-labs/dbt-docs/issues/None))
- Display contract and column constraints on the model page ([dbt-docs/#433](https://github.com/dbt-labs/dbt-docs/issues/433))
- Display semantic model details in docs ([dbt-docs/#431](https://github.com/dbt-labs/dbt-docs/issues/431))

### Under the Hood

- Refactor flaky test pp_versioned_models ([#7781](https://github.com/dbt-labs/dbt-core/issues/7781))
- format exception from dbtPlugin.initialize ([#8152](https://github.com/dbt-labs/dbt-core/issues/8152))
- A way to control maxBytes for a single dbt.log file ([#8199](https://github.com/dbt-labs/dbt-core/issues/8199))
- Ref expressions with version can now be processed by the latest version of the high-performance dbt-extractor library. ([#7688](https://github.com/dbt-labs/dbt-core/issues/7688))
- Bump manifest schema version to v11, freeze manifest v10 ([#8333](https://github.com/dbt-labs/dbt-core/issues/8333))
- add tracking for plugin.get_nodes calls ([#8344](https://github.com/dbt-labs/dbt-core/issues/8344))
- add internal flag: --no-partial-parse-file-diff to inform whether to compute a file diff during partial parsing ([#8363](https://github.com/dbt-labs/dbt-core/issues/8363))
- Add return values to a number of functions for mypy ([#8389](https://github.com/dbt-labs/dbt-core/issues/8389))
- Fix mypy warnings for ManifestLoader.load() ([#8401](https://github.com/dbt-labs/dbt-core/issues/8401))
- Use python version 3.10.7 in Docker image. ([#8444](https://github.com/dbt-labs/dbt-core/issues/8444))

### Dependencies

- Bump mypy from 1.3.0 to 1.4.0 ([#7912](https://github.com/dbt-labs/dbt-core/pull/7912))
- Bump mypy from 1.4.0 to 1.4.1 ([#8219](https://github.com/dbt-labs/dbt-core/pull/8219))
- Update pin for click<9 ([#8232](https://github.com/dbt-labs/dbt-core/pull/8232))
- Add upper bound to sqlparse pin of <0.5 ([#8236](https://github.com/dbt-labs/dbt-core/pull/8236))
- Support dbt-semantic-interfaces 0.2.0 ([#8250](https://github.com/dbt-labs/dbt-core/pull/8250))

### Contributors
- [@anjutiwari](https://github.com/anjutiwari) ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
- [@d-kaneshiro](https://github.com/d-kaneshiro) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
- [@gem7318](https://github.com/gem7318) ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
- [@lllong33](https://github.com/lllong33) ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
- [@marcodamore](https://github.com/marcodamore) ([#436](https://github.com/dbt-labs/dbt-core/issues/436))
- [@pgoslatara](https://github.com/pgoslatara) ([#8100](https://github.com/dbt-labs/dbt-core/issues/8100))
- [@ramonvermeulen](https://github.com/ramonvermeulen) ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
7  .changes/1.7.0/Breaking Changes-20230725-171359.yaml  Normal file
@@ -0,0 +1,7 @@
kind: Breaking Changes
body: Removed the FirstRunResultError and AfterFirstRunResultError event types, using
  the existing RunResultError in their place.
time: 2023-07-25T17:13:59.441682-04:00
custom:
  Author: peterallenwebb
  Issue: "7963"

6  .changes/1.7.0/Dependencies-20230621-005752.yaml  Normal file
@@ -0,0 +1,6 @@
kind: "Dependencies"
body: "Bump mypy from 1.3.0 to 1.4.0"
time: 2023-06-21T00:57:52.00000Z
custom:
  Author: dependabot[bot]
  PR: 7912

6  .changes/1.7.0/Dependencies-20230726-201740.yaml  Normal file
@@ -0,0 +1,6 @@
kind: "Dependencies"
body: "Bump mypy from 1.4.0 to 1.4.1"
time: 2023-07-26T20:17:40.00000Z
custom:
  Author: dependabot[bot]
  PR: 8219

6  .changes/1.7.0/Dependencies-20230727-145703.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Dependencies
body: Update pin for click<9
time: 2023-07-27T14:57:03.180458-05:00
custom:
  Author: emmyoop
  PR: "8232"

6  .changes/1.7.0/Dependencies-20230727-145726.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Dependencies
body: Add upper bound to sqlparse pin of <0.5
time: 2023-07-27T14:57:26.40416-05:00
custom:
  Author: emmyoop
  PR: "8236"

6  .changes/1.7.0/Dependencies-20230728-135227.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Dependencies
body: Support dbt-semantic-interfaces 0.2.0
time: 2023-07-28T13:52:27.207241-07:00
custom:
  Author: QMalcolm
  PR: "8250"

6  .changes/1.7.0/Docs-20230715-200907.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Docs
body: Corrected spelling of "Partiton"
time: 2023-07-15T20:09:07.057361092+02:00
custom:
  Author: pgoslatara
  Issue: "8100"

6  .changes/1.7.0/Docs-20230727-170900.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Docs
body: fixed comment util.py
time: 2023-07-27T17:09:00.089237+09:00
custom:
  Author: d-kaneshiro
  Issue: None

6  .changes/1.7.0/Docs-20230804-131815.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Docs
body: Display contract and column constraints on the model page
time: 2023-08-04T13:18:15.627005-05:00
custom:
  Author: emmyoop
  Issue: "433"

6  .changes/1.7.0/Docs-20230807-152548.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Docs
body: Display semantic model details in docs
time: 2023-08-07T15:25:48.711627-05:00
custom:
  Author: emmyoop
  Issue: "431"

7  .changes/1.7.0/Features-20230702-122813.yaml  Normal file
@@ -0,0 +1,7 @@
kind: Features
body: Enable re-population of metadata vars post-environment change during programmatic
  invocation
time: 2023-07-02T12:28:13.416305-04:00
custom:
  Author: gem7318
  Issue: "8010"

6  .changes/1.7.0/Features-20230714-202445.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Features
body: Added support to configure a delimiter for a seed file, defaults to comma
time: 2023-07-14T20:24:45.513847165+02:00
custom:
  Author: ramonvermeulen
  Issue: "3990"

6  .changes/1.7.0/Features-20230803-151824.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Features
body: 'Allow specification of `create_metric: true` on measures'
time: 2023-08-03T15:18:24.351003-07:00
custom:
  Author: QMalcolm
  Issue: "8125"
6  .changes/1.7.0/Fixes-20230424-210734.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Copy dir during `dbt deps` if symlink fails
time: 2023-04-24T21:07:34.336797+05:30
custom:
  Author: anjutiwari
  Issue: "7428 8223"

6  .changes/1.7.0/Fixes-20230625-142731.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Fixed double-underline
time: 2023-06-25T14:27:31.231253719+08:00
custom:
  Author: lllong33
  Issue: "5301"

6  .changes/1.7.0/Fixes-20230717-160652.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Copy target_schema from config into snapshot node
time: 2023-07-17T16:06:52.957724-04:00
custom:
  Author: gshank
  Issue: "6745"

6  .changes/1.7.0/Fixes-20230720-122723.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Add status to Parse Inline Error
time: 2023-07-20T12:27:23.085084-07:00
custom:
  Author: ChenyuLInx
  Issue: "8173"

6  .changes/1.7.0/Fixes-20230720-161513.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Ensure `warn_error_options` get serialized in `invocation_args_dict`
time: 2023-07-20T16:15:13.761813-07:00
custom:
  Author: QMalcolm
  Issue: "7694"

6  .changes/1.7.0/Fixes-20230720-170112.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Stop detecting materialization macros based on macro name
time: 2023-07-20T17:01:12.496238-07:00
custom:
  Author: QMalcolm
  Issue: "6231"

6  .changes/1.7.0/Fixes-20230720-172422.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Update `dbt deps` download retry logic to handle `EOFError` exceptions
time: 2023-07-20T17:24:22.969951-07:00
custom:
  Author: QMalcolm
  Issue: "6653"

6  .changes/1.7.0/Fixes-20230726-104448.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Improve handling of CTE injection with ephemeral models
time: 2023-07-26T10:44:48.888451-04:00
custom:
  Author: gshank
  Issue: "8213"

6  .changes/1.7.0/Fixes-20230727-125830.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Fix unbound local variable error in `checked_agg_time_dimension_for_measure`
time: 2023-07-27T12:58:30.673803-07:00
custom:
  Author: QMalcolm
  Issue: "8230"

7  .changes/1.7.0/Fixes-20230728-115620.yaml  Normal file
@@ -0,0 +1,7 @@
kind: Fixes
body: Ensure runtime errors are raised for graph runnable tasks (compile, show, run,
  etc)
time: 2023-07-28T11:56:20.863718-04:00
custom:
  Author: michelleark
  Issue: "8166"

6  .changes/1.7.0/Fixes-20230802-141556.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Fix retry not working with log-file-max-bytes
time: 2023-08-02T14:15:56.306027-07:00
custom:
  Author: ChenyuLInx
  Issue: "8297"

6  .changes/1.7.0/Fixes-20230806-222319.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Detect changes to model access, version, or latest_version in state:modified
time: 2023-08-06T22:23:19.166334-04:00
custom:
  Author: michelleark
  Issue: "8189"

6  .changes/1.7.0/Fixes-20230810-184859.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Add connection status into list of statuses for dbt debug
time: 2023-08-10T18:48:59.221344+01:00
custom:
  Author: aranke
  Issue: "8350"

6  .changes/1.7.0/Fixes-20230811-204144.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: fix fqn-selection for external versioned models
time: 2023-08-11T20:41:44.725144-04:00
custom:
  Author: michelleark
  Issue: "8374"

7  .changes/1.7.0/Fixes-20230811-212008.yaml  Normal file
@@ -0,0 +1,7 @@
kind: Fixes
body: 'Fix: DbtInternalError after model that previously ref''d external model is
  deleted'
time: 2023-08-11T21:20:08.145554-04:00
custom:
  Author: michelleark
  Issue: "8375"

6  .changes/1.7.0/Fixes-20230814-145702.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Fix using list command with path selector and project-dir
time: 2023-08-14T14:57:02.02816-04:00
custom:
  Author: gshank
  Issue: "8385"

6  .changes/1.7.0/Fixes-20230815-104444.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Remedy performance regression by only writing run_results.json once.
time: 2023-08-15T10:44:44.836991-04:00
custom:
  Author: peterallenwebb
  Issue: "8360"

6  .changes/1.7.0/Fixes-20230817-130915.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Use python version 3.10.7 in Docker image.
time: 2023-08-17T13:09:15.936349-05:00
custom:
  Author: McKnight-42
  Issue: "8444"
6  .changes/1.7.0/Under the Hood-20230719-124611.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Refactor flaky test pp_versioned_models
time: 2023-07-19T12:46:11.972481-04:00
custom:
  Author: gshank
  Issue: "7781"

6  .changes/1.7.0/Under the Hood-20230719-163334.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: format exception from dbtPlugin.initialize
time: 2023-07-19T16:33:34.586377-04:00
custom:
  Author: michelleark
  Issue: "8152"

6  .changes/1.7.0/Under the Hood-20230724-150654.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: A way to control maxBytes for a single dbt.log file
time: 2023-07-24T15:06:54.263822-07:00
custom:
  Author: ChenyuLInx
  Issue: "8199"

7  .changes/1.7.0/Under the Hood-20230725-102609.yaml  Normal file
@@ -0,0 +1,7 @@
kind: Under the Hood
body: Ref expressions with version can now be processed by the latest version of the
  high-performance dbt-extractor library.
time: 2023-07-25T10:26:09.902878-04:00
custom:
  Author: peterallenwebb
  Issue: "7688"

6  .changes/1.7.0/Under the Hood-20230807-164509.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Bump manifest schema version to v11, freeze manifest v10
time: 2023-08-07T16:45:09.712744-04:00
custom:
  Author: gshank
  Issue: "8333"

6  .changes/1.7.0/Under the Hood-20230809-094834.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: add tracking for plugin.get_nodes calls
time: 2023-08-09T09:48:34.819445-04:00
custom:
  Author: michelleark
  Issue: "8344"

7  .changes/1.7.0/Under the Hood-20230811-100902.yaml  Normal file
@@ -0,0 +1,7 @@
kind: Under the Hood
body: 'add internal flag: --no-partial-parse-file-diff to inform whether to compute
  a file diff during partial parsing'
time: 2023-08-11T10:09:02.832241-04:00
custom:
  Author: michelleark
  Issue: "8363"

6  .changes/1.7.0/Under the Hood-20230815-170307.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Add return values to a number of functions for mypy
time: 2023-08-15T17:03:07.895252-04:00
custom:
  Author: gshank
  Issue: "8389"

6  .changes/1.7.0/Under the Hood-20230817-134548.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Fix mypy warnings for ManifestLoader.load()
time: 2023-08-17T13:45:48.937252-04:00
custom:
  Author: gshank
  Issue: "8401"

6  .changes/unreleased/Docs-20230728-193438.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Docs
body: Fix newline escapes and improve formatting in docker README
time: 2023-07-28T19:34:38.351042747+02:00
custom:
  Author: jamezrin
  Issue: "8211"

6  .changes/unreleased/Fixes-20230818-095348.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Ensure parsing does not break when `window_groupings` is not specified for `non_additive_dimension`
time: 2023-08-18T09:53:48.154848-07:00
custom:
  Author: QMalcolm
  Issue: "8453"

6  .changes/unreleased/Fixes-20230818-103802.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Turn breaking changes to contracted models into warnings for unversioned models
time: 2023-08-18T10:38:02.251286-05:00
custom:
  Author: emmyoop
  Issue: 8384 8282

7  .changes/unreleased/Fixes-20230824-161024.yaml  Normal file
@@ -0,0 +1,7 @@
kind: Fixes
body: fix ambiguous reference error for tests and versions when model name is duplicated across
  packages
time: 2023-08-24T16:10:24.437362-04:00
custom:
  Author: michelleark
  Issue: "8327 8493"

7  .changes/unreleased/Under the Hood-20230821-134801.yaml  Normal file
@@ -0,0 +1,7 @@
kind: Under the Hood
body: 'Re-organize jinja macros: relation-specific in /macros/adapters/relations/<relation>,
  relation agnostic in /macros/relations'
time: 2023-08-21T13:48:01.474731-04:00
custom:
  Author: mikealfare
  Issue: "8449"

6  .changes/unreleased/Under the Hood-20230823-194237.yaml  Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Update typing to meet mypy standards
time: 2023-08-23T19:42:37.130694-04:00
custom:
  Author: mikealfare
  Issue: "8396"
58  .github/ISSUE_TEMPLATE/implementation-ticket.yml  vendored  Normal file
@@ -0,0 +1,58 @@
name: 🛠️ Implementation
description: This is an implementation ticket intended for use by the maintainers of dbt-core
title: "[<project>] <title>"
labels: ["user docs"]
body:
  - type: markdown
    attributes:
      value: This is an implementation ticket intended for use by the maintainers of dbt-core
  - type: checkboxes
    attributes:
      label: Housekeeping
      description: >
        A couple friendly reminders:
        1. Remove the `user docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
        2. Link any blocking issues in the "Blocked on" field under the "Core devs & maintainers" project.
      options:
        - label: I am a maintainer of dbt-core
          required: true
  - type: textarea
    attributes:
      label: Short description
      description: |
        Describe the scope of the ticket, a high-level implementation approach and any tradeoffs to consider
    validations:
      required: true
  - type: textarea
    attributes:
      label: Acceptance criteria
      description: |
        What is the definition of done for this ticket? Include any relevant edge cases and/or test cases
    validations:
      required: true
  - type: textarea
    attributes:
      label: Impact to Other Teams
      description: |
        Will this change impact other teams? Include details of the kinds of changes required (new tests, code changes, related tickets) and _add the relevant `Impact:[team]` label_.
      placeholder: |
        Example: This change impacts `dbt-redshift` because the tests will need to be modified. The `Impact:[Adapter]` label has been added.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Will backports be required?
      description: |
        Will this change need to be backported to previous versions? Add details, possible blockers to backporting and _add the relevant backport labels `backport 1.x.latest`_
      placeholder: |
        Example: Backport to 1.6.latest, 1.5.latest and 1.4.latest. Since 1.4 isn't using click, the backport may be complicated. The `backport 1.6.latest`, `backport 1.5.latest` and `backport 1.4.latest` labels have been added.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Context
      description: |
        Provide the "why", motivation, and alternative approaches considered -- linking to previous refinement issues, spikes, Notion docs as appropriate
    validations:
      required: false
8  .github/workflows/changelog-existence.yml  vendored
@@ -2,10 +2,8 @@
 # Checks that a file has been committed under the /.changes directory
 # as a new CHANGELOG entry. Cannot check for a specific filename as
 # it is dynamically generated by change type and timestamp.
-# This workflow should not require any secrets since it runs for PRs
-# from forked repos.
-# By default, secrets are not passed to workflows running from
-# a forked repo.
+# This workflow runs on pull_request_target because it requires
+# secrets to post comments.
 
 # **why?**
 # Ensure code change gets reflected in the CHANGELOG.
@@ -19,7 +17,7 @@
 name: Check Changelog Entry
 
 on:
-  pull_request:
+  pull_request_target:
     types: [opened, reopened, labeled, unlabeled, synchronize]
   workflow_dispatch:
37  .github/workflows/docs-issue.yml  vendored  Normal file
@@ -0,0 +1,37 @@
# **what?**
# Open an issue in docs.getdbt.com when a PR is labeled `user docs`

# **why?**
# To reduce barriers for keeping docs up to date

# **when?**
# When a PR is labeled `user docs` and is merged. Runs on pull_request_target to run off the workflow already merged,
# not the workflow that existed on the PR branch. This allows old PRs to get comments.


name: Open issues in docs.getdbt.com repo when a PR is labeled
run-name: "Open an issue in docs.getdbt.com for PR #${{ github.event.pull_request.number }}"

on:
  pull_request_target:
    types: [labeled, closed]

defaults:
  run:
    shell: bash

permissions:
  issues: write # opens new issues
  pull-requests: write # comments on PRs


jobs:
  open_issues:
    if: contains( github.event.pull_request.labels.*.name, 'user docs') && github.event.pull_request.merged == true
    uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
    with:
      issue_repository: "dbt-labs/docs.getdbt.com"
      issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} PR #${{ github.event.pull_request.number }}"
      issue_labels: "content,improvement,dbt Core"
      issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated."
    secrets: inherit
84  .github/workflows/main.yml  vendored
@@ -33,6 +33,11 @@ defaults:
   run:
     shell: bash
 
+# top-level adjustments can be made here
+env:
+  # number of parallel processes to spawn for python integration testing
+  PYTHON_INTEGRATION_TEST_WORKERS: 5
+
 jobs:
   code-quality:
     name: code-quality
@@ -103,26 +108,59 @@ jobs:
       - name: Upload Unit Test Coverage to Codecov
         if: ${{ matrix.python-version == '3.11' }}
         uses: codecov/codecov-action@v3
+        env:
+          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
-          token: ${{ secrets.CODECOV_TOKEN }}
           flags: unit
 
+  integration-metadata:
+    name: integration test metadata generation
+    runs-on: ubuntu-latest
+    outputs:
+      split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
+      include: ${{ steps.generate-include.outputs.include }}
+
+    steps:
+      - name: generate split-groups
+        id: generate-split-groups
+        run: |
+          MATRIX_JSON="["
+          for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+            MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
+          done
+          MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
+          MATRIX_JSON+="]"
+          echo "split-groups=${MATRIX_JSON}"
+          echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
+
+      - name: generate include
+        id: generate-include
+        run: |
+          INCLUDE=('"python-version":"3.8","os":"windows-latest"' '"python-version":"3.8","os":"macos-latest"' )
+          INCLUDE_GROUPS="["
+          for include in ${INCLUDE[@]}; do
+            for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+              INCLUDE_GROUPS+=$(sed 's/$/, /' <<< "{\"split-group\":\"${group}\",${include}}")
+            done
+          done
+          INCLUDE_GROUPS=$(echo $INCLUDE_GROUPS | sed 's/,*$//g')
+          INCLUDE_GROUPS+="]"
+          echo "include=${INCLUDE_GROUPS}"
+          echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT
+
   integration:
-    name: integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
+    name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
 
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 60
+    timeout-minutes: 30
+    needs:
+      - integration-metadata
     strategy:
       fail-fast: false
       matrix:
         python-version: ["3.8", "3.9", "3.10", "3.11"]
         os: [ubuntu-20.04]
-        include:
-          - python-version: 3.8
-            os: windows-latest
-          - python-version: 3.8
-            os: macos-latest
+        split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
+        include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
     env:
       TOXENV: integration
       DBT_INVOCATION_ENV: github-actions
@@ -165,6 +203,8 @@ jobs:
 
       - name: Run tests
         run: tox -- --ddtrace
+        env:
+          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
 
       - name: Get current date
         if: always()
@@ -182,8 +222,26 @@ jobs:
       - name: Upload Integration Test Coverage to Codecov
         if: ${{ matrix.python-version == '3.11' }}
         uses: codecov/codecov-action@v3
+        env:
+          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
-          token: ${{ secrets.CODECOV_TOKEN }}
           flags: integration
 
+  integration-report:
+    if: ${{ always() }}
+    name: Integration Test Suite
+    runs-on: ubuntu-latest
+    needs: integration
+    steps:
+      - name: "Integration Tests Failed"
+        if: ${{ contains(needs.integration.result, 'failure') || contains(needs.integration.result, 'cancelled') }}
+        # when this is true the next step won't execute
+        run: |
+          echo "::notice title='Integration test suite failed'"
+          exit 1
+
+      - name: "Integration Tests Passed"
+        run: |
+          echo "::notice title='Integration test suite passed'"
 
   build:
     name: build packages
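The `generate split-groups` step above builds the JSON array of worker IDs that feeds the test matrix. A minimal Python sketch of the same computation (an illustrative helper, not part of the repo) makes the bash string manipulation easier to follow:

```python
# Equivalent of the bash in `generate split-groups`: produce '["1", "2", ..., "N"]'
# for PYTHON_INTEGRATION_TEST_WORKERS = N. Illustrative only.
import json

def split_groups(workers: int = 5) -> str:
    # json.dumps uses ", " separators by default, matching the bash output
    return json.dumps([str(i) for i in range(1, workers + 1)])

print(split_groups())  # ["1", "2", "3", "4", "5"]
```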
@@ -18,11 +18,41 @@ on:
 
 permissions: read-all
 
+# top-level adjustments can be made here
+env:
+  # number of parallel processes to spawn for python testing
+  PYTHON_INTEGRATION_TEST_WORKERS: 5
+
 jobs:
+  integration-metadata:
+    name: integration test metadata generation
+    runs-on: ubuntu-latest
+    outputs:
+      split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
+
+    steps:
+      - name: generate split-groups
+        id: generate-split-groups
+        run: |
+          MATRIX_JSON="["
+          for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+            MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
+          done
+          MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
+          MATRIX_JSON+="]"
+          echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
+
   # run the performance measurements on the current or default branch
   test-schema:
     name: Test Log Schema
     runs-on: ubuntu-20.04
     timeout-minutes: 30
+    needs:
+      - integration-metadata
+    strategy:
+      fail-fast: false
+      matrix:
+        split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
     env:
       # turns warnings into errors
       RUSTFLAGS: "-D warnings"
@@ -65,3 +95,14 @@ jobs:
       # we actually care if these pass, because the normal test run doesn't usually include many json log outputs
       - name: Run integration tests
         run: tox -e integration -- -nauto
+        env:
+          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
+
+  test-schema-report:
+    name: Log Schema Test Suite
+    runs-on: ubuntu-latest
+    needs: test-schema
+    steps:
+      - name: "[Notification] Log test suite passes"
+        run: |
+          echo "::notice title="Log test suite passes""
@@ -37,7 +37,7 @@ repos:
       alias: flake8-check
       stages: [manual]
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v1.3.0
+    rev: v1.4.1
     hooks:
       - id: mypy
         # N.B.: Mypy is... a bit fragile.
72  CHANGELOG.md
@@ -5,6 +5,78 @@
 - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
 - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)
 
+## dbt-core 1.7.0-b1 - August 17, 2023
+
+### Breaking Changes
+
+- Removed the FirstRunResultError and AfterFirstRunResultError event types, using the existing RunResultError in their place. ([#7963](https://github.com/dbt-labs/dbt-core/issues/7963))
+
+### Features
+
+- Enable re-population of metadata vars post-environment change during programmatic invocation ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
+- Added support to configure a delimiter for a seed file, defaults to comma ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
+- Allow specification of `create_metric: true` on measures ([#8125](https://github.com/dbt-labs/dbt-core/issues/8125))
+
+### Fixes
+
+- Copy dir during `dbt deps` if symlink fails ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
+- Fixed double-underline ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
+- Copy target_schema from config into snapshot node ([#6745](https://github.com/dbt-labs/dbt-core/issues/6745))
+- Enable converting deprecation warnings to errors ([#8130](https://github.com/dbt-labs/dbt-core/issues/8130))
+- Add status to Parse Inline Error ([#8173](https://github.com/dbt-labs/dbt-core/issues/8173))
+- Ensure `warn_error_options` get serialized in `invocation_args_dict` ([#7694](https://github.com/dbt-labs/dbt-core/issues/7694))
+- Stop detecting materialization macros based on macro name ([#6231](https://github.com/dbt-labs/dbt-core/issues/6231))
+- Update `dbt deps` download retry logic to handle `EOFError` exceptions ([#6653](https://github.com/dbt-labs/dbt-core/issues/6653))
+- Improve handling of CTE injection with ephemeral models ([#8213](https://github.com/dbt-labs/dbt-core/issues/8213))
+- Fix unbound local variable error in `checked_agg_time_dimension_for_measure` ([#8230](https://github.com/dbt-labs/dbt-core/issues/8230))
+- Ensure runtime errors are raised for graph runnable tasks (compile, show, run, etc) ([#8166](https://github.com/dbt-labs/dbt-core/issues/8166))
+- Fix retry not working with log-file-max-bytes ([#8297](https://github.com/dbt-labs/dbt-core/issues/8297))
+- Detect changes to model access, version, or latest_version in state:modified ([#8189](https://github.com/dbt-labs/dbt-core/issues/8189))
+- Add connection status into list of statuses for dbt debug ([#8350](https://github.com/dbt-labs/dbt-core/issues/8350))
+- fix fqn-selection for external versioned models ([#8374](https://github.com/dbt-labs/dbt-core/issues/8374))
+- Fix: DbtInternalError after model that previously ref'd external model is deleted ([#8375](https://github.com/dbt-labs/dbt-core/issues/8375))
+- Fix using list command with path selector and project-dir ([#8385](https://github.com/dbt-labs/dbt-core/issues/8385))
+- Remedy performance regression by only writing run_results.json once. ([#8360](https://github.com/dbt-labs/dbt-core/issues/8360))
+
+### Docs
+
+- Corrected spelling of "Partiton" ([dbt-docs/#8100](https://github.com/dbt-labs/dbt-docs/issues/8100))
+- Remove static SQL codeblock for metrics ([dbt-docs/#436](https://github.com/dbt-labs/dbt-docs/issues/436))
+- fixed comment util.py ([dbt-docs/#None](https://github.com/dbt-labs/dbt-docs/issues/None))
+- Display contract and column constraints on the model page ([dbt-docs/#433](https://github.com/dbt-labs/dbt-docs/issues/433))
+- Display semantic model details in docs ([dbt-docs/#431](https://github.com/dbt-labs/dbt-docs/issues/431))
+
+### Under the Hood
+
+- Refactor flaky test pp_versioned_models ([#7781](https://github.com/dbt-labs/dbt-core/issues/7781))
+- format exception from dbtPlugin.initialize ([#8152](https://github.com/dbt-labs/dbt-core/issues/8152))
+- A way to control maxBytes for a single dbt.log file ([#8199](https://github.com/dbt-labs/dbt-core/issues/8199))
+- Ref expressions with version can now be processed by the latest version of the high-performance dbt-extractor library. ([#7688](https://github.com/dbt-labs/dbt-core/issues/7688))
+- Bump manifest schema version to v11, freeze manifest v10 ([#8333](https://github.com/dbt-labs/dbt-core/issues/8333))
+- add tracking for plugin.get_nodes calls ([#8344](https://github.com/dbt-labs/dbt-core/issues/8344))
+- add internal flag: --no-partial-parse-file-diff to inform whether to compute a file diff during partial parsing ([#8363](https://github.com/dbt-labs/dbt-core/issues/8363))
+- Add return values to a number of functions for mypy ([#8389](https://github.com/dbt-labs/dbt-core/issues/8389))
+- Fix mypy warnings for ManifestLoader.load() ([#8401](https://github.com/dbt-labs/dbt-core/issues/8401))
+- Use python version 3.10.7 in Docker image. ([#8444](https://github.com/dbt-labs/dbt-core/issues/8444))
+
+### Dependencies
+
+- Bump mypy from 1.3.0 to 1.4.0 ([#7912](https://github.com/dbt-labs/dbt-core/pull/7912))
+- Bump mypy from 1.4.0 to 1.4.1 ([#8219](https://github.com/dbt-labs/dbt-core/pull/8219))
+- Update pin for click<9 ([#8232](https://github.com/dbt-labs/dbt-core/pull/8232))
+- Add upper bound to sqlparse pin of <0.5 ([#8236](https://github.com/dbt-labs/dbt-core/pull/8236))
+- Support dbt-semantic-interfaces 0.2.0 ([#8250](https://github.com/dbt-labs/dbt-core/pull/8250))
+
+### Contributors
+- [@anjutiwari](https://github.com/anjutiwari) ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
+- [@d-kaneshiro](https://github.com/d-kaneshiro) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
+- [@gem7318](https://github.com/gem7318) ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
+- [@lllong33](https://github.com/lllong33) ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
+- [@marcodamore](https://github.com/marcodamore) ([#436](https://github.com/dbt-labs/dbt-core/issues/436))
+- [@pgoslatara](https://github.com/pgoslatara) ([#8100](https://github.com/dbt-labs/dbt-core/issues/8100))
+- [@ramonvermeulen](https://github.com/ramonvermeulen) ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
+
+
 ## Previous Releases
 
 For information on prior major and minor releases, see their changelogs:
@@ -0,0 +1,9 @@
ignore:
  - ".github"
  - ".changes"
coverage:
  status:
    project:
      default:
        target: auto
        threshold: 0.01% # Reduce noise by ignoring rounding errors in coverage drops
@@ -400,7 +400,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
 
     @abc.abstractmethod
     def execute(
-        self, sql: str, auto_begin: bool = False, fetch: bool = False
+        self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
     ) -> Tuple[AdapterResponse, agate.Table]:
         """Execute the given SQL.
 
@@ -408,7 +408,28 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         :param bool auto_begin: If set, and dbt is not currently inside a
             transaction, automatically begin one.
         :param bool fetch: If set, fetch results.
+        :param int limit: If set, limits the result set
         :return: A tuple of the query status and results (empty if fetch=False).
         :rtype: Tuple[AdapterResponse, agate.Table]
         """
         raise dbt.exceptions.NotImplementedError("`execute` is not implemented for this adapter!")
 
+    def add_select_query(self, sql: str) -> Tuple[Connection, Any]:
+        """
+        This was added here because base.impl.BaseAdapter.get_column_schema_from_query expects it to be here.
+        That method wouldn't work unless the adapter used sql.impl.SQLAdapter, sql.connections.SQLConnectionManager
+        or defined this method on <Adapter>ConnectionManager before passing it in to <Adapter>Adapter.
+
+        See https://github.com/dbt-labs/dbt-core/issues/8396 for more information.
+        """
+        raise dbt.exceptions.NotImplementedError(
+            "`add_select_query` is not implemented for this adapter!"
+        )
+
+    @classmethod
+    def data_type_code_to_name(cls, type_code: Union[int, str]) -> str:
+        """Get the string representation of the data type from the type_code."""
+        # https://peps.python.org/pep-0249/#type-objects
+        raise dbt.exceptions.NotImplementedError(
+            "`data_type_code_to_name` is not implemented for this adapter!"
+        )
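Two things happen in this hunk: `execute` gains an optional `limit` argument, and `add_select_query` / `data_type_code_to_name` become `NotImplementedError` stubs on the base class so adapters that never defined them fail with a clear message rather than an `AttributeError`. A standalone sketch (sqlite3 stand-in, not dbt's real classes) of one way an adapter's `execute` could honor `limit` when fetching:

```python
# Hypothetical, self-contained sketch -- not dbt's actual adapter code.
import sqlite3
from typing import Any, List, Optional, Tuple

def execute(
    conn: sqlite3.Connection, sql: str, fetch: bool = False, limit: Optional[int] = None
) -> Tuple[str, List[Any]]:
    cursor = conn.execute(sql)
    if not fetch:
        return "OK", []
    # fetchmany(limit) caps the client-side result set; fetchall() when unbounded
    rows = cursor.fetchmany(limit) if limit is not None else cursor.fetchall()
    return f"OK {len(rows)}", rows

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE t (x int)")
conn.executemany("INSERT INTO t VALUES (?)", [(i,) for i in range(10)])
print(execute(conn, "SELECT x FROM t", fetch=True, limit=3))  # at most 3 rows
```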
@@ -43,7 +43,7 @@ from dbt.exceptions import (
     UnexpectedNullError,
 )
 
-from dbt.adapters.protocol import AdapterConfig, ConnectionManagerProtocol
+from dbt.adapters.protocol import AdapterConfig
 from dbt.clients.agate_helper import empty_table, merge_tables, table_from_rows
 from dbt.clients.jinja import MacroGenerator
 from dbt.contracts.graph.manifest import Manifest, MacroManifest
@@ -60,7 +60,7 @@ from dbt.events.types import (
 )
 from dbt.utils import filter_null_values, executor, cast_to_str, AttrDict
 
-from dbt.adapters.base.connections import Connection, AdapterResponse
+from dbt.adapters.base.connections import Connection, AdapterResponse, BaseConnectionManager
 from dbt.adapters.base.meta import AdapterMeta, available
 from dbt.adapters.base.relation import (
     ComponentName,
@@ -208,7 +208,7 @@ class BaseAdapter(metaclass=AdapterMeta):
 
     Relation: Type[BaseRelation] = BaseRelation
     Column: Type[BaseColumn] = BaseColumn
-    ConnectionManager: Type[ConnectionManagerProtocol]
+    ConnectionManager: Type[BaseConnectionManager]
 
     # A set of clobber config fields accepted by this adapter
     # for use in materializations
@@ -315,14 +315,21 @@ class BaseAdapter(metaclass=AdapterMeta):
 
     @available.parse(lambda *a, **k: ("", empty_table()))
     def get_partitions_metadata(self, table: str) -> Tuple[agate.Table]:
-        """Obtain partitions metadata for a BigQuery partitioned table.
+        """
+        TODO: Can we move this to dbt-bigquery?
+        Obtain partitions metadata for a BigQuery partitioned table.
 
-        :param str table_id: a partitioned table id, in standard SQL format.
+        :param str table: a partitioned table id, in standard SQL format.
         :return: a partition metadata tuple, as described in
            https://cloud.google.com/bigquery/docs/creating-partitioned-tables#getting_partition_metadata_using_meta_tables.
         :rtype: agate.Table
         """
-        return self.connections.get_partitions_metadata(table=table)
+        if hasattr(self.connections, "get_partitions_metadata"):
+            return self.connections.get_partitions_metadata(table=table)
+        else:
+            raise NotImplementedError(
+                "`get_partitions_metadata` is not implemented for this adapter!"
+            )
 
     ###
     # Methods that should never be overridden
@@ -453,9 +460,10 @@ class BaseAdapter(metaclass=AdapterMeta):
         # it's possible that there were no relations in some schemas. We want
         # to insert the schemas we query into the cache's `.schemas` attribute
         # so we can check it later
-        cache_update: Set[Tuple[Optional[str], Optional[str]]] = set()
+        cache_update: Set[Tuple[Optional[str], str]] = set()
         for relation in cache_schemas:
-            cache_update.add((relation.database, relation.schema))
+            if relation.schema:
+                cache_update.add((relation.database, relation.schema))
         self.cache.update_schemas(cache_update)
 
     def set_relations_cache(
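The `get_partitions_metadata` change swaps a direct call for a `hasattr` capability check: `ConnectionManager` is now typed as `BaseConnectionManager`, which does not declare that BigQuery-only method, so the base adapter probes for it at runtime. A minimal sketch of the pattern (hypothetical classes, not the real adapter hierarchy):

```python
# Hypothetical classes illustrating the hasattr capability check above.
class BaseConnections:
    pass

class BigQueryConnections(BaseConnections):
    def get_partitions_metadata(self, table: str) -> dict:
        return {"table": table, "partitions": []}

def get_partitions_metadata(connections: BaseConnections, table: str) -> dict:
    # only call the optional method if the concrete manager defines it
    if hasattr(connections, "get_partitions_metadata"):
        return connections.get_partitions_metadata(table=table)
    raise NotImplementedError("`get_partitions_metadata` is not implemented for this adapter!")

print(get_partitions_metadata(BigQueryConnections(), "project.dataset.table"))
try:
    get_partitions_metadata(BaseConnections(), "t")
except NotImplementedError as e:
    print(e)
```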
@@ -25,9 +25,9 @@ class _QueryComment(local):
     - a source_name indicating what set the current thread's query comment
     """
 
-    def __init__(self, initial):
+    def __init__(self, initial) -> None:
         self.query_comment: Optional[str] = initial
-        self.append = False
+        self.append: bool = False
 
     def add(self, sql: str) -> str:
         if not self.query_comment:
@@ -1,6 +1,6 @@
 import abc
 import time
-from typing import List, Optional, Tuple, Any, Iterable, Dict, Union
+from typing import List, Optional, Tuple, Any, Iterable, Dict
 
 import agate
 
@@ -131,14 +131,6 @@ class SQLConnectionManager(BaseConnectionManager):
 
         return dbt.clients.agate_helper.table_from_data_flat(data, column_names)
 
-    @classmethod
-    def data_type_code_to_name(cls, type_code: Union[int, str]) -> str:
-        """Get the string representation of the data type from the type_code."""
-        # https://peps.python.org/pep-0249/#type-objects
-        raise dbt.exceptions.NotImplementedError(
-            "`data_type_code_to_name` is not implemented for this adapter!"
-        )
-
     def execute(
         self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
     ) -> Tuple[AdapterResponse, agate.Table]:
@@ -61,7 +61,6 @@ def args_to_context(args: List[str]) -> Context:
     if len(args) == 1 and "," in args[0]:
         args = args[0].split(",")
     sub_command_name, sub_command, args = cli.resolve_command(cli_ctx, args)
-
     # Handle source and docs group.
     if isinstance(sub_command, Group):
         sub_command_name, sub_command, args = sub_command.resolve_command(cli_ctx, args)
@@ -319,7 +318,6 @@ def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandPar
 
     for k, v in args_dict.items():
         k = k.lower()
-
         # if a "which" value exists in the args dict, it should match the command provided
         if k == WHICH_KEY:
             if v != command.value:
@@ -344,7 +342,8 @@ def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandPar
 
         if k == "macro" and command == CliCommand.RUN_OPERATION:
             add_fn(v)
-        elif v in (None, False):
+        # None is a Singleton, False is a Flyweight, only one instance of each.
+        elif v is None or v is False:
             add_fn(f"--no-{spinal_cased}")
         elif v is True:
             add_fn(f"--{spinal_cased}")
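The `v in (None, False)` test was subtly wrong: tuple membership uses `==`, and because `bool` is an `int` subclass, `0 == False` is true, so an integer flag value of `0` would be mistaken for a disabled flag. Identity checks keep the values distinct:

```python
# Membership uses equality; identity does not.
print(0 in (None, False))       # True  -- 0 == False, so 0 would match
print(0 is None or 0 is False)  # False -- 0 is its own object
```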
@@ -132,6 +132,7 @@ class dbtRunner:
 @p.enable_legacy_logger
 @p.fail_fast
 @p.log_cache_events
+@p.log_file_max_bytes
 @p.log_format
 @p.log_format_file
 @p.log_level
@@ -140,6 +141,7 @@ class dbtRunner:
 @p.macro_debugging
 @p.partial_parse
 @p.partial_parse_file_path
+@p.partial_parse_file_diff
 @p.populate_cache
 @p.print
 @p.printer_width
@@ -171,6 +171,15 @@ use_colors_file = click.option(
     default=True,
 )
 
+log_file_max_bytes = click.option(
+    "--log-file-max-bytes",
+    envvar="DBT_LOG_FILE_MAX_BYTES",
+    help="Configure the max file size in bytes for a single dbt.log file, before rolling over. 0 means no limit.",
+    default=10 * 1024 * 1024,  # 10mb
+    type=click.INT,
+    hidden=True,
+)
+
 log_path = click.option(
     "--log-path",
     envvar="DBT_LOG_PATH",
@@ -248,6 +257,14 @@ partial_parse_file_path = click.option(
     type=click.Path(exists=True, dir_okay=False, resolve_path=True),
 )
 
+partial_parse_file_diff = click.option(
+    "--partial-parse-file-diff/--no-partial-parse-file-diff",
+    envvar="DBT_PARTIAL_PARSE_FILE_DIFF",
+    help="Internal flag for whether to compute a file diff during partial parsing.",
+    hidden=True,
+    default=True,
+)
+
 populate_cache = click.option(
     "--populate-cache/--no-populate-cache",
     envvar="DBT_POPULATE_CACHE",
@@ -380,9 +397,9 @@ inline = click.option(
 # Most CLI arguments should use the combined `select` option that aliases `--models` to `--select`.
 # However, if you need to split out these separators (like `dbt ls`), use the `models` and `raw_select` options instead.
 # See https://github.com/dbt-labs/dbt-core/pull/6774#issuecomment-1408476095 for more info.
-models = click.option(*model_decls, **select_attrs)
-raw_select = click.option(*select_decls, **select_attrs)
-select = click.option(*select_decls, *model_decls, **select_attrs)
+models = click.option(*model_decls, **select_attrs)  # type: ignore[arg-type]
+raw_select = click.option(*select_decls, **select_attrs)  # type: ignore[arg-type]
+select = click.option(*select_decls, *model_decls, **select_attrs)  # type: ignore[arg-type]
 
 selector = click.option(
     "--selector",
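The new hidden `--log-file-max-bytes` flag defaults to 10 * 1024 * 1024 (10 MiB) before rolling over. For intuition only, Python's stdlib `RotatingFileHandler` implements the same `maxBytes`-style rollover; this sketch is illustrative and is not necessarily the handler dbt uses internally:

```python
# Illustrative rollover behavior; dbt's internal log handler may differ.
import logging
from logging.handlers import RotatingFileHandler

handler = RotatingFileHandler("dbt.log", maxBytes=10 * 1024 * 1024, backupCount=1)
logger = logging.getLogger("demo")
logger.addHandler(handler)
logger.warning("rotates to dbt.log.1 once dbt.log exceeds 10 MiB")
```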
@@ -9,7 +9,6 @@ from typing import Iterable, List, Dict, Union, Optional, Any
 
 from dbt.exceptions import DbtRuntimeError
 
-
 BOM = BOM_UTF8.decode("utf-8")  # '\ufeff'
 
 
@@ -135,12 +134,12 @@ def as_matrix(table):
     return [r.values() for r in table.rows.values()]
 
 
-def from_csv(abspath, text_columns):
+def from_csv(abspath, text_columns, delimiter=","):
     type_tester = build_type_tester(text_columns=text_columns)
     with open(abspath, encoding="utf-8") as fp:
         if fp.read(1) != BOM:
             fp.seek(0)
-        return agate.Table.from_csv(fp, column_types=type_tester)
+        return agate.Table.from_csv(fp, column_types=type_tester, delimiter=delimiter)
 
 
 class _NullMarker:
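`from_csv` now forwards a `delimiter` (default comma) to `agate.Table.from_csv`, which is what lets a seed file use a non-comma delimiter (#3990). A small self-contained sketch of what the pass-through enables:

```python
# Parsing a semicolon-delimited seed with agate; extra kwargs such as
# delimiter are forwarded to the underlying csv reader.
import io
import agate

csv_text = "id;name\n1;alpha\n2;beta\n"
table = agate.Table.from_csv(io.StringIO(csv_text), delimiter=";")
print(len(table.rows), list(table.column_names))  # 2 ['id', 'name']
```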
@@ -191,7 +191,7 @@ NativeSandboxEnvironment.template_class = NativeSandboxTemplate  # type: ignore
 
 
 class TemplateCache:
-    def __init__(self):
+    def __init__(self) -> None:
         self.file_cache: Dict[str, jinja2.Template] = {}
 
     def get_node_template(self, node) -> jinja2.Template:
@@ -4,7 +4,6 @@ import json
|
||||
import networkx as nx # type: ignore
|
||||
import os
|
||||
import pickle
|
||||
import sqlparse
|
||||
|
||||
from collections import defaultdict
|
||||
from typing import List, Dict, Any, Tuple, Optional
|
||||
@@ -36,6 +35,7 @@ from dbt.node_types import NodeType, ModelLanguage
|
||||
from dbt.events.format import pluralize
|
||||
import dbt.tracking
|
||||
import dbt.task.list as list_task
|
||||
import sqlparse
|
||||
|
||||
graph_file_name = "graph.gpickle"
|
||||
|
||||
@@ -378,16 +378,16 @@ class Compiler:
|
||||
|
||||
_add_prepended_cte(prepended_ctes, InjectedCTE(id=cte.id, sql=sql))
|
||||
|
||||
injected_sql = inject_ctes_into_sql(
|
||||
model.compiled_code,
|
||||
prepended_ctes,
|
||||
)
|
||||
# Check again before updating for multi-threading
|
||||
if not model.extra_ctes_injected:
|
||||
injected_sql = inject_ctes_into_sql(
|
||||
model.compiled_code,
|
||||
prepended_ctes,
|
||||
)
|
||||
model.extra_ctes_injected = True
|
||||
model._pre_injected_sql = model.compiled_code
|
||||
model.compiled_code = injected_sql
|
||||
model.extra_ctes = prepended_ctes
|
||||
model.extra_ctes_injected = True
|
||||
|
||||
# if model.extra_ctes is not set to prepended ctes, something went wrong
|
||||
return model, model.extra_ctes
|
||||
@@ -523,6 +523,12 @@ class Compiler:
         the node's raw_code into compiled_code, and then calls the
         recursive method to "prepend" the ctes.
         """
+        # Make sure Lexer for sqlparse 0.4.4 is initialized
+        from sqlparse.lexer import Lexer  # type: ignore
+
+        if hasattr(Lexer, "get_default_instance"):
+            Lexer.get_default_instance()
+
         node = self._compile_code(node, manifest, extra_context)

         node, _ = self._recursively_prepend_ctes(node, manifest, extra_context)
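The added lines warm up sqlparse's lexer singleton before compilation fans out across threads. The same guard works standalone:

```python
from sqlparse.lexer import Lexer  # sqlparse >= 0.4.4 exposes a singleton lexer

# Instantiating the default lexer once up front avoids a race where several
# compile threads initialize it concurrently. Older sqlparse versions don't
# have the method, hence the hasattr guard.
if hasattr(Lexer, "get_default_instance"):
    Lexer.get_default_instance()
```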
@@ -40,7 +40,7 @@ class MacroResolver:
         self._build_internal_packages_namespace()
         self._build_macros_by_name()

-    def _build_internal_packages_namespace(self):
+    def _build_internal_packages_namespace(self) -> None:
         # Iterate in reverse-order and overwrite: the packages that are first
         # in the list are the ones we want to "win".
         self.internal_packages_namespace: MacroNamespace = {}
@@ -56,7 +56,7 @@ class MacroResolver:
     # root package namespace
     # non-internal packages (that aren't local or root)
     # dbt internal packages
-    def _build_macros_by_name(self):
+    def _build_macros_by_name(self) -> None:
         macros_by_name = {}

         # all internal packages (already in the right order)
@@ -78,7 +78,7 @@ class MacroResolver:
         self,
         package_namespaces: Dict[str, MacroNamespace],
         macro: Macro,
-    ):
+    ) -> None:
         if macro.package_name in package_namespaces:
             namespace = package_namespaces[macro.package_name]
         else:
@@ -89,7 +89,7 @@ class MacroResolver:
             raise DuplicateMacroNameError(macro, macro, macro.package_name)
         package_namespaces[macro.package_name][macro.name] = macro

-    def add_macro(self, macro: Macro):
+    def add_macro(self, macro: Macro) -> None:
         macro_name: str = macro.name

         # internal macros (from plugins) will be processed separately from
@@ -103,11 +103,11 @@ class MacroResolver:
         if macro.package_name == self.root_project_name:
             self.root_package_macros[macro_name] = macro

-    def add_macros(self):
+    def add_macros(self) -> None:
         for macro in self.macros.values():
             self.add_macro(macro)

-    def get_macro(self, local_package, macro_name):
+    def get_macro(self, local_package, macro_name) -> Optional[Macro]:
         local_package_macros = {}
         # If the macro is explicitly prefixed with an internal namespace
         # (e.g. 'dbt.some_macro'), look there first
@@ -125,7 +125,7 @@ class MacroResolver:
             return self.macros_by_name[macro_name]
         return None

-    def get_macro_id(self, local_package, macro_name):
+    def get_macro_id(self, local_package, macro_name) -> Optional[str]:
         macro = self.get_macro(local_package, macro_name)
         if macro is None:
             return None
@@ -865,8 +865,9 @@ class ProviderContext(ManifestContext):
         assert self.model.root_path
         path = os.path.join(self.model.root_path, self.model.original_file_path)
         column_types = self.model.config.column_types
+        delimiter = self.model.config.delimiter
         try:
-            table = agate_helper.from_csv(path, text_columns=column_types)
+            table = agate_helper.from_csv(path, text_columns=column_types, delimiter=delimiter)
         except ValueError as e:
             raise LoadAgateTableValueError(e, node=self.model)
         table.original_abspath = os.path.abspath(path)
@@ -225,6 +225,8 @@ class SchemaSourceFile(BaseSourceFile):
     sources: List[str] = field(default_factory=list)
     exposures: List[str] = field(default_factory=list)
     metrics: List[str] = field(default_factory=list)
+    # metrics generated from semantic_model measures
+    generated_metrics: List[str] = field(default_factory=list)
     groups: List[str] = field(default_factory=list)
     # node patches contain models, seeds, snapshots, analyses
     ndp: List[str] = field(default_factory=list)
@@ -1331,10 +1331,13 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
         self.exposures[exposure.unique_id] = exposure
         source_file.exposures.append(exposure.unique_id)

-    def add_metric(self, source_file: SchemaSourceFile, metric: Metric):
+    def add_metric(self, source_file: SchemaSourceFile, metric: Metric, generated: bool = False):
         _check_duplicates(metric, self.metrics)
         self.metrics[metric.unique_id] = metric
-        source_file.metrics.append(metric.unique_id)
+        if not generated:
+            source_file.metrics.append(metric.unique_id)
+        else:
+            source_file.generated_metrics.append(metric.unique_id)

     def add_group(self, source_file: SchemaSourceFile, group: Group):
         _check_duplicates(group, self.groups)
@@ -1422,7 +1425,7 @@ AnyManifest = Union[Manifest, MacroManifest]


 @dataclass
-@schema_version("manifest", 10)
+@schema_version("manifest", 11)
 class WritableManifest(ArtifactMixin):
     nodes: Mapping[UniqueID, ManifestNode] = field(
         metadata=dict(description=("The nodes defined in the dbt project and its dependencies"))
@@ -1486,6 +1489,7 @@ class WritableManifest(ArtifactMixin):
         ("manifest", 7),
         ("manifest", 8),
         ("manifest", 9),
+        ("manifest", 10),
     ]

     @classmethod
@@ -1493,7 +1497,7 @@ class WritableManifest(ArtifactMixin):
         """This overrides the "upgrade_schema_version" call in VersionedSchema (via
         ArtifactMixin) to modify the dictionary passed in from earlier versions of the manifest."""
         manifest_schema_version = get_manifest_schema_version(data)
-        if manifest_schema_version <= 9:
+        if manifest_schema_version <= 10:
             data = upgrade_manifest_json(data, manifest_schema_version)
         return cls.from_dict(data)
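In practice the widened gate means any manifest.json written at schema v10 or earlier is migrated before deserialization. A hedged sketch of loading an older artifact (the helper function is hypothetical; `upgrade_schema_version` is the classmethod shown above):

```python
import json

from dbt.contracts.graph.manifest import WritableManifest


def load_manifest_artifact(path: str) -> WritableManifest:
    # Artifacts from schema v7 through v10 are upgraded in place to v11;
    # a current-version artifact passes through unchanged.
    with open(path) as fp:
        data = json.load(fp)
    return WritableManifest.upgrade_schema_version(data)


manifest = load_manifest_artifact("target/manifest.json")
```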
@@ -544,6 +544,7 @@ class NodeConfig(NodeAndTestConfig):
 @dataclass
 class SeedConfig(NodeConfig):
     materialized: str = "seed"
+    delimiter: str = ","
     quote_columns: Optional[bool] = None

     @classmethod
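The new `SeedConfig.delimiter` field is what backs the configurable seed delimiter from the changelog. A hedged sketch of the setting in use (the project and seed names are hypothetical, and the `+delimiter` YAML key is stated here as an assumption about how the config surfaces):

```python
# Equivalent of configuring, in dbt_project.yml:
#
#   seeds:
#     my_project:
#       regions:
#         +delimiter: "|"
#
# which parses into a SeedConfig whose delimiter overrides the "," default:
cfg = SeedConfig(delimiter="|")
assert cfg.materialized == "seed" and cfg.delimiter == "|"
```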
@@ -619,6 +620,8 @@ class SnapshotConfig(EmptySnapshotConfig):
     @classmethod
     def validate(cls, data):
         super().validate(data)
+        # Note: currently you can't just set these keys in schema.yml because this validation
+        # will fail when parsing the snapshot node.
         if not data.get("strategy") or not data.get("unique_key") or not data.get("target_schema"):
             raise ValidationError(
                 "Snapshots must be configured with a 'strategy', 'unique_key', "
@@ -649,6 +652,7 @@ class SnapshotConfig(EmptySnapshotConfig):
         if data.get("materialized") and data.get("materialized") != "snapshot":
             raise ValidationError("A snapshot must have a materialized value of 'snapshot'")

+    # Called by "calculate_node_config_dict" in ContextConfigGenerator
     def finalize_and_validate(self):
         data = self.to_dict(omit_none=True)
         self.validate(data)
@@ -29,3 +29,11 @@ class ModelNodeArgs:
             unique_id = f"{unique_id}.v{self.version}"

         return unique_id
+
+    @property
+    def fqn(self) -> List[str]:
+        fqn = [self.package_name, self.name]
+        if self.version:
+            fqn.append(f"v{self.version}")
+
+        return fqn
@@ -44,12 +44,14 @@ from dbt.events.types import (
     SeedExceedsLimitSamePath,
     SeedExceedsLimitAndPathChanged,
     SeedExceedsLimitChecksumChanged,
+    UnversionedBreakingChange,
 )
+from dbt.events.contextvars import set_log_contextvars
 from dbt.flags import get_flags
 from dbt.node_types import ModelLanguage, NodeType, AccessType
 from dbt_semantic_interfaces.call_parameter_sets import FilterCallParameterSets
 from dbt_semantic_interfaces.references import (
     EntityReference,
     MeasureReference,
     LinkableElementReference,
     SemanticModelReference,
@@ -589,7 +591,7 @@ class ModelNode(CompiledNode):
             name=args.name,
             package_name=args.package_name,
             unique_id=unique_id,
-            fqn=[args.package_name, args.name],
+            fqn=args.fqn,
             version=args.version,
             latest_version=args.latest_version,
             relation_name=args.relation_name,
@@ -625,6 +627,18 @@ class ModelNode(CompiledNode):
     def materialization_enforces_constraints(self) -> bool:
         return self.config.materialized in ["table", "incremental"]

+    def same_contents(self, old, adapter_type) -> bool:
+        return super().same_contents(old, adapter_type) and self.same_ref_representation(old)
+
+    def same_ref_representation(self, old) -> bool:
+        return (
+            # Changing the latest_version may break downstream unpinned refs
+            self.latest_version == old.latest_version
+            # Changes to access or deprecation_date may lead to ref-related parsing errors
+            and self.access == old.access
+            and self.deprecation_date == old.deprecation_date
+        )
+
     def build_contract_checksum(self):
         # We don't need to construct the checksum if the model does not
         # have contract enforced, because it won't be used.
@@ -669,11 +683,11 @@ class ModelNode(CompiledNode):
         # These are the categories of breaking changes:
         contract_enforced_disabled: bool = False
         columns_removed: List[str] = []
-        column_type_changes: List[Tuple[str, str, str]] = []
-        enforced_column_constraint_removed: List[Tuple[str, str]] = []  # column, constraint_type
-        enforced_model_constraint_removed: List[
-            Tuple[str, List[str]]
-        ] = []  # constraint_type, columns
+        column_type_changes: List[Dict[str, str]] = []
+        enforced_column_constraint_removed: List[
+            Dict[str, str]
+        ] = []  # column_name, constraint_type
+        enforced_model_constraint_removed: List[Dict[str, Any]] = []  # constraint_type, columns
         materialization_changed: List[str] = []

         if old.contract.enforced is True and self.contract.enforced is False:
@@ -695,11 +709,11 @@ class ModelNode(CompiledNode):
             # Has this column's data type changed?
             elif old_value.data_type != self.columns[old_key].data_type:
                 column_type_changes.append(
-                    (
-                        str(old_value.name),
-                        str(old_value.data_type),
-                        str(self.columns[old_key].data_type),
-                    )
+                    {
+                        "column_name": str(old_value.name),
+                        "previous_column_type": str(old_value.data_type),
+                        "current_column_type": str(self.columns[old_key].data_type),
+                    }
                 )

             # track if there are any column level constraints for the materialization check late
@@ -720,7 +734,11 @@ class ModelNode(CompiledNode):
                     and constraint_support[old_constraint.type] == ConstraintSupport.ENFORCED
                 ):
                     enforced_column_constraint_removed.append(
-                        (old_key, str(old_constraint.type))
+                        {
+                            "column_name": old_key,
+                            "constraint_name": old_constraint.name,
+                            "constraint_type": ConstraintType(old_constraint.type),
+                        }
                     )

         # Now compare the model level constraints
@@ -731,7 +749,11 @@ class ModelNode(CompiledNode):
                 and constraint_support[old_constraint.type] == ConstraintSupport.ENFORCED
             ):
                 enforced_model_constraint_removed.append(
-                    (str(old_constraint.type), old_constraint.columns)
+                    {
+                        "constraint_name": old_constraint.name,
+                        "constraint_type": ConstraintType(old_constraint.type),
+                        "columns": old_constraint.columns,
+                    }
                 )

         # Check for relevant materialization changes.
@@ -745,7 +767,8 @@ class ModelNode(CompiledNode):
         # If a column has been added, it will be missing in the old.columns, and present in self.columns
         # That's a change (caught by the different checksums), but not a breaking change

-        # Did we find any changes that we consider breaking? If so, that's an error
+        # Did we find any changes that we consider breaking? If there's an enforced contract, that's
+        # a warning unless the model is versioned, then it's an error.
         if (
             contract_enforced_disabled
             or columns_removed
@@ -754,21 +777,78 @@ class ModelNode(CompiledNode):
             or enforced_column_constraint_removed
             or materialization_changed
         ):
-            raise (
-                ContractBreakingChangeError(
-                    contract_enforced_disabled=contract_enforced_disabled,
-                    columns_removed=columns_removed,
-                    column_type_changes=column_type_changes,
-                    enforced_column_constraint_removed=enforced_column_constraint_removed,
-                    enforced_model_constraint_removed=enforced_model_constraint_removed,
-                    materialization_changed=materialization_changed,
-                    node=self,
-                )
-            )
+            breaking_changes = []
+            if contract_enforced_disabled:
+                breaking_changes.append(
+                    "Contract enforcement was removed: Previously, this model had an enforced contract. It is no longer configured to enforce its contract, and this is a breaking change."
+                )
+            if columns_removed:
+                columns_removed_str = "\n - ".join(columns_removed)
+                breaking_changes.append(f"Columns were removed: \n - {columns_removed_str}")
+            if column_type_changes:
+                column_type_changes_str = "\n - ".join(
+                    [
+                        f"{c['column_name']} ({c['previous_column_type']} -> {c['current_column_type']})"
+                        for c in column_type_changes
+                    ]
+                )
+                breaking_changes.append(
+                    f"Columns with data_type changes: \n - {column_type_changes_str}"
+                )
+            if enforced_column_constraint_removed:
+                column_constraint_changes_str = "\n - ".join(
+                    [
+                        f"'{c['constraint_name'] if c['constraint_name'] is not None else c['constraint_type']}' constraint on column {c['column_name']}"
+                        for c in enforced_column_constraint_removed
+                    ]
+                )
+                breaking_changes.append(
+                    f"Enforced column level constraints were removed: \n - {column_constraint_changes_str}"
+                )
+            if enforced_model_constraint_removed:
+                model_constraint_changes_str = "\n - ".join(
+                    [
+                        f"'{c['constraint_name'] if c['constraint_name'] is not None else c['constraint_type']}' constraint on columns {c['columns']}"
+                        for c in enforced_model_constraint_removed
+                    ]
+                )
+                breaking_changes.append(
+                    f"Enforced model level constraints were removed: \n - {model_constraint_changes_str}"
+                )
+            if materialization_changed:
+                materialization_changes_str = (
+                    f"{materialization_changed[0]} -> {materialization_changed[1]}"
+                )
+
+                breaking_changes.append(
+                    f"Materialization changed with enforced constraints: \n - {materialization_changes_str}"
+                )
+
+            if self.version is None:
+                warn_or_error(
+                    UnversionedBreakingChange(
+                        contract_enforced_disabled=contract_enforced_disabled,
+                        columns_removed=columns_removed,
+                        column_type_changes=column_type_changes,
+                        enforced_column_constraint_removed=enforced_column_constraint_removed,
+                        enforced_model_constraint_removed=enforced_model_constraint_removed,
+                        breaking_changes=breaking_changes,
+                        model_name=self.name,
+                        model_file_path=self.original_file_path,
+                    ),
+                    node=self,
+                )
+            else:
+                raise (
+                    ContractBreakingChangeError(
+                        breaking_changes=breaking_changes,
+                        node=self,
+                    )
+                )

-        # Otherwise, though we didn't find any *breaking* changes, the contract has still changed -- same_contract: False
-        else:
-            return False
+        # Otherwise, the contract has changed -- same_contract: False
+        return False


 # TODO: rm?
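Taken together, this hunk changes the failure mode: a breaking change to an unversioned contracted model now emits the I071 warning and reports `same_contract=False`, while a versioned model still raises. A standalone sketch of the decision (simplified names and signatures, not dbt's exact API):

```python
from typing import List, Optional


class ContractBreakingChangeError(Exception):
    def __init__(self, breaking_changes: List[str]) -> None:
        reasons = "\n - ".join(breaking_changes)
        super().__init__(
            "While comparing to previous project state, dbt detected a breaking "
            f"change to an enforced contract.\n - {reasons}"
        )


def check_contract(breaking_changes: List[str], version: Optional[int]) -> bool:
    if breaking_changes:
        if version is None:
            # Unversioned model: warn (event I071) but keep going.
            print(f"warn I071: breaking changes detected: {breaking_changes}")
        else:
            # Versioned model: still a hard error.
            raise ContractBreakingChangeError(breaking_changes)
    # Either way, the contract differs from the saved state.
    return False
```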
@@ -1498,6 +1578,7 @@ class SemanticModel(GraphNode):
     refs: List[RefArgs] = field(default_factory=list)
     created_at: float = field(default_factory=lambda: time.time())
     config: SemanticModelConfig = field(default_factory=SemanticModelConfig)
+    primary_entity: Optional[str] = None

     @property
     def entity_references(self) -> List[LinkableElementReference]:
@@ -1568,17 +1649,26 @@ class SemanticModel(GraphNode):
             measure is not None
         ), f"No measure with name ({measure_reference.element_name}) in semantic_model with name ({self.name})"

-        if self.defaults is not None:
-            default_agg_time_dimesion = self.defaults.agg_time_dimension
+        default_agg_time_dimension = (
+            self.defaults.agg_time_dimension if self.defaults is not None else None
+        )

-        agg_time_dimension_name = measure.agg_time_dimension or default_agg_time_dimesion
+        agg_time_dimension_name = measure.agg_time_dimension or default_agg_time_dimension
         assert agg_time_dimension_name is not None, (
-            f"Aggregation time dimension for measure {measure.name} is not set! This should either be set directly on "
-            f"the measure specification in the model, or else defaulted to the primary time dimension in the data "
-            f"source containing the measure."
+            f"Aggregation time dimension for measure {measure.name} on semantic model {self.name} is not set! "
+            "To fix this either specify a default `agg_time_dimension` for the semantic model or define an "
+            "`agg_time_dimension` on the measure directly."
         )
         return TimeDimensionReference(element_name=agg_time_dimension_name)

+    @property
+    def primary_entity_reference(self) -> Optional[EntityReference]:
+        return (
+            EntityReference(element_name=self.primary_entity)
+            if self.primary_entity is not None
+            else None
+        )
+

 # ====================================
 # Patches
@@ -220,7 +220,7 @@ class UnparsedModelUpdate(UnparsedNodeUpdate):
     versions: Sequence[UnparsedVersion] = field(default_factory=list)
     deprecation_date: Optional[datetime.datetime] = None

-    def __post_init__(self):
+    def __post_init__(self) -> None:
         if self.latest_version:
             version_values = [version.v for version in self.versions]
             if self.latest_version not in version_values:
@@ -228,7 +228,7 @@ class UnparsedModelUpdate(UnparsedNodeUpdate):
                     f"latest_version: {self.latest_version} is not one of model '{self.name}' versions: {version_values} "
                 )

-        seen_versions: set[str] = set()
+        seen_versions = set()
         for version in self.versions:
             if str(version.v) in seen_versions:
                 raise ParsingError(
@@ -689,7 +689,7 @@ class UnparsedEntity(dbtClassMixin):
 class UnparsedNonAdditiveDimension(dbtClassMixin):
     name: str
     window_choice: str  # AggregationType enum
-    window_groupings: List[str]
+    window_groupings: List[str] = field(default_factory=list)


 @dataclass
@@ -701,6 +701,7 @@ class UnparsedMeasure(dbtClassMixin):
     agg_params: Optional[MeasureAggregationParameters] = None
     non_additive_dimension: Optional[UnparsedNonAdditiveDimension] = None
    agg_time_dimension: Optional[str] = None
+    create_metric: bool = False


 @dataclass
@@ -728,6 +729,7 @@ class UnparsedSemanticModel(dbtClassMixin):
     entities: List[UnparsedEntity] = field(default_factory=list)
     measures: List[UnparsedMeasure] = field(default_factory=list)
     dimensions: List[UnparsedDimension] = field(default_factory=list)
+    primary_entity: Optional[str] = None


 def normalize_date(d: Optional[datetime.date]) -> Optional[datetime.datetime]:
@@ -258,6 +258,13 @@ class ArtifactMixin(VersionedSchema, Writable, Readable):


+class Identifier(ValidatedStringMixin):
+    """Our definition of a valid Identifier is the same as what's valid for an unquoted database table name.
+
+    That is:
+    1. It can contain a-z, A-Z, 0-9, and _
+    1. It cannot start with a number
+    """
+
+    ValidationRegex = r"^[^\d\W]\w*$"
+
     @classmethod
@@ -51,19 +51,15 @@ class LocalPinnedPackage(LocalPackageMixin, PinnedPackage):
         src_path = self.resolve_path(project)
         dest_path = self.get_installation_path(project, renderer)

-        can_create_symlink = system.supports_symlinks()
-
         if system.path_exists(dest_path):
             if not system.path_is_symlink(dest_path):
                 system.rmdir(dest_path)
             else:
                 system.remove_file(dest_path)

-        if can_create_symlink:
+        try:
             fire_event(DepsCreatingLocalSymlink())
             system.make_symlink(src_path, dest_path)
-
-        else:
+        except OSError:
             fire_event(DepsSymlinkNotAvailable())
             shutil.copytree(src_path, dest_path)
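The deps change swaps an up-front capability check for EAFP: attempt the symlink and fall back to copying when the OS refuses, which also covers filesystems where `supports_symlinks()` would have guessed wrong. A minimal standalone equivalent:

```python
import os
import shutil


def install_local_package(src_path: str, dest_path: str) -> None:
    # Prefer a symlink so edits to the local package show up immediately;
    # fall back to a full copy on platforms that refuse symlinks (e.g.
    # Windows without developer mode or elevated privileges).
    try:
        os.symlink(src_path, dest_path)
    except OSError:
        shutil.copytree(src_path, dest_path)
```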
@@ -8,12 +8,12 @@ import logging
 from logging.handlers import RotatingFileHandler
 import threading
 import traceback
-from typing import Any, Callable, List, Optional, TextIO
+from typing import Any, Callable, List, Optional, TextIO, Protocol
 from uuid import uuid4
 from dbt.events.format import timestamp_to_datetime_string

 from dbt.events.base_types import BaseEvent, EventLevel, msg_from_base_event, EventMsg

+import dbt.utils

 # A Filter is a function which takes a BaseEvent and returns True if the event
 # should be logged, False otherwise.
@@ -80,6 +80,7 @@ class LoggerConfig:
     use_colors: bool = False
     output_stream: Optional[TextIO] = None
     output_file_name: Optional[str] = None
+    output_file_max_bytes: Optional[int] = 10 * 1024 * 1024  # 10 mb
     logger: Optional[Any] = None


@@ -100,7 +101,7 @@ class _Logger:
             file_handler = RotatingFileHandler(
                 filename=str(config.output_file_name),
                 encoding="utf8",
-                maxBytes=10 * 1024 * 1024,  # 10 mb
+                maxBytes=config.output_file_max_bytes,  # type: ignore
                 backupCount=5,
             )
             self._python_logger = self._get_python_log_for_handler(file_handler)
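With the hard-coded 10 MiB cap moved into `LoggerConfig.output_file_max_bytes`, the rollover size becomes configurable (it is fed from `flags.LOG_FILE_MAX_BYTES` later in this diff). The handler wiring in isolation, with an assumed 25 MiB override:

```python
import logging
from logging.handlers import RotatingFileHandler

# A 25 MiB cap instead of the old fixed 10 MiB; dbt keeps five rotated files.
handler = RotatingFileHandler(
    filename="logs/dbt.log",
    encoding="utf8",
    maxBytes=25 * 1024 * 1024,
    backupCount=5,
)
logger = logging.getLogger("file_log")
logger.addHandler(handler)
```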
@@ -175,7 +176,7 @@ class _JsonLogger(_Logger):
         from dbt.events.functions import msg_to_dict

         msg_dict = msg_to_dict(msg)
-        raw_log_line = json.dumps(msg_dict, sort_keys=True)
+        raw_log_line = json.dumps(msg_dict, sort_keys=True, cls=dbt.utils.ForgivingJSONEncoder)
         line = self.scrubber(raw_log_line)  # type: ignore
         return line
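`dbt.utils.ForgivingJSONEncoder` keeps a structured log line from crashing on values the stock encoder rejects. A rough standalone approximation of the idea (the real encoder lives in dbt.utils and may differ in detail):

```python
import json
from datetime import datetime


class ForgivingJSONEncoder(json.JSONEncoder):
    def default(self, obj):
        # Anything json can't serialize natively degrades to its str() form
        # rather than raising TypeError mid-log-line.
        return str(obj)


print(json.dumps({"ts": datetime(2023, 8, 17)}, sort_keys=True, cls=ForgivingJSONEncoder))
# {"ts": "2023-08-17 00:00:00"}
```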
@@ -205,7 +206,7 @@ class EventManager:
         for callback in self.callbacks:
             callback(msg)

-    def add_logger(self, config: LoggerConfig):
+    def add_logger(self, config: LoggerConfig) -> None:
         logger = (
             _JsonLogger(self, config)
             if config.line_format == LineFormat.Json
@@ -217,3 +218,25 @@ class EventManager:
     def flush(self):
         for logger in self.loggers:
             logger.flush()
+
+
+class IEventManager(Protocol):
+    callbacks: List[Callable[[EventMsg], None]]
+    invocation_id: str
+
+    def fire_event(self, e: BaseEvent, level: Optional[EventLevel] = None) -> None:
+        ...
+
+    def add_logger(self, config: LoggerConfig) -> None:
+        ...
+
+
+class TestEventManager(IEventManager):
+    def __init__(self):
+        self.event_history = []
+
+    def fire_event(self, e: BaseEvent, level: Optional[EventLevel] = None) -> None:
+        self.event_history.append((e, level))
+
+    def add_logger(self, config: LoggerConfig) -> None:
+        raise NotImplementedError()
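`IEventManager` is a `typing.Protocol`, so anything with matching members satisfies it structurally; `TestEventManager` subclasses it only for explicitness. A compact demonstration of the structural check, with hypothetical names:

```python
from typing import Optional, Protocol


class IEventManager(Protocol):
    invocation_id: str

    def fire_event(self, e: object, level: Optional[str] = None) -> None:
        ...


class RecordingManager:  # note: no inheritance from IEventManager
    invocation_id = "test"

    def __init__(self) -> None:
        self.history = []

    def fire_event(self, e: object, level: Optional[str] = None) -> None:
        self.history.append((e, level))


def use(manager: IEventManager) -> None:
    manager.fire_event("hello")


use(RecordingManager())  # type-checks: the shape matches the protocol
```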
@@ -1,8 +1,8 @@
 from dbt.constants import METADATA_ENV_PREFIX
 from dbt.events.base_types import BaseEvent, EventLevel, EventMsg
-from dbt.events.eventmgr import EventManager, LoggerConfig, LineFormat, NoFilter
+from dbt.events.eventmgr import EventManager, LoggerConfig, LineFormat, NoFilter, IEventManager
 from dbt.events.helpers import env_secrets, scrub_secrets
-from dbt.events.types import Formatting, Note
+from dbt.events.types import Note
 from dbt.flags import get_flags, ENABLE_LEGACY_LOGGER
 from dbt.logger import GLOBAL_LOGGER, make_log_dir_if_missing
 from functools import partial
@@ -13,6 +13,7 @@ from typing import Callable, Dict, List, Optional, TextIO
 import uuid
 from google.protobuf.json_format import MessageToDict

+import dbt.utils

 LOG_VERSION = 3
 metadata_vars: Optional[Dict[str, str]] = None
@@ -67,7 +68,11 @@ def setup_event_logger(flags, callbacks: List[Callable[[EventMsg], None]] = [])
         log_level_file = EventLevel.DEBUG if flags.DEBUG else EventLevel(flags.LOG_LEVEL_FILE)
         EVENT_MANAGER.add_logger(
             _get_logfile_config(
-                log_file, flags.USE_COLORS_FILE, log_file_format, log_level_file
+                log_file,
+                flags.USE_COLORS_FILE,
+                log_file_format,
+                log_level_file,
+                flags.LOG_FILE_MAX_BYTES,
             )
         )
@@ -110,13 +115,15 @@ def _stdout_filter(
     line_format: LineFormat,
     msg: EventMsg,
 ) -> bool:
-    return (msg.info.name not in ["CacheAction", "CacheDumpGraph"] or log_cache_events) and not (
-        line_format == LineFormat.Json and type(msg.data) == Formatting
-    )
+    return msg.info.name not in ["CacheAction", "CacheDumpGraph"] or log_cache_events


 def _get_logfile_config(
-    log_path: str, use_colors: bool, line_format: LineFormat, level: EventLevel
+    log_path: str,
+    use_colors: bool,
+    line_format: LineFormat,
+    level: EventLevel,
+    log_file_max_bytes: int,
 ) -> LoggerConfig:
     return LoggerConfig(
         name="file_log",
@@ -126,14 +133,13 @@ def _get_logfile_config(
         scrubber=env_scrubber,
         filter=partial(_logfile_filter, bool(get_flags().LOG_CACHE_EVENTS), line_format),
         output_file_name=log_path,
+        output_file_max_bytes=log_file_max_bytes,
     )


 def _logfile_filter(log_cache_events: bool, line_format: LineFormat, msg: EventMsg) -> bool:
-    return (
-        msg.info.code not in nofile_codes
-        and not (msg.info.name in ["CacheAction", "CacheDumpGraph"] and not log_cache_events)
-        and not (line_format == LineFormat.Json and type(msg.data) == Formatting)
-    )
+    return msg.info.code not in nofile_codes and not (
+        msg.info.name in ["CacheAction", "CacheDumpGraph"] and not log_cache_events
+    )
@@ -172,7 +178,7 @@ def cleanup_event_logger():
 # Since dbt-rpc does not do its own log setup, and since some events can
 # currently fire before logs can be configured by setup_event_logger(), we
 # create a default configuration with default settings and no file output.
-EVENT_MANAGER: EventManager = EventManager()
+EVENT_MANAGER: IEventManager = EventManager()
 EVENT_MANAGER.add_logger(
     _get_logbook_log_config(False, True, False, False)  # type: ignore
     if ENABLE_LEGACY_LOGGER
@@ -200,7 +206,7 @@ def stop_capture_stdout_logs():
 # the message may contain secrets which must be scrubbed at the usage site.
 def msg_to_json(msg: EventMsg) -> str:
     msg_dict = msg_to_dict(msg)
-    raw_log_line = json.dumps(msg_dict, sort_keys=True)
+    raw_log_line = json.dumps(msg_dict, sort_keys=True, cls=dbt.utils.ForgivingJSONEncoder)
     return raw_log_line


@@ -263,7 +269,7 @@ def fire_event(e: BaseEvent, level: Optional[EventLevel] = None) -> None:

 def get_metadata_vars() -> Dict[str, str]:
     global metadata_vars
-    if metadata_vars is None:
+    if not metadata_vars:
         metadata_vars = {
             k[len(METADATA_ENV_PREFIX) :]: v
             for k, v in os.environ.items()
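Switching the cache test from `is None` to plain falsiness is what enables re-population of metadata vars after the environment changes during a programmatic invocation: a run that found no `DBT_ENV_CUSTOM_ENV_*` variables leaves an empty dict, which now triggers a rebuild on the next call. A standalone sketch (the prefix value matches dbt's convention but is restated here as an assumption):

```python
import os
from typing import Dict, Optional

METADATA_ENV_PREFIX = "DBT_ENV_CUSTOM_ENV_"
metadata_vars: Optional[Dict[str, str]] = None


def get_metadata_vars() -> Dict[str, str]:
    global metadata_vars
    # `not metadata_vars` is True for both None and {}, so an empty cache
    # from a previous invocation is rebuilt against the current environment.
    if not metadata_vars:
        metadata_vars = {
            k[len(METADATA_ENV_PREFIX):]: v
            for k, v in os.environ.items()
            if k.startswith(METADATA_ENV_PREFIX)
        }
    return metadata_vars


os.environ["DBT_ENV_CUSTOM_ENV_team"] = "analytics"
print(get_metadata_vars())  # {'team': 'analytics'}
```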
@@ -285,3 +291,8 @@ def set_invocation_id() -> None:
     # This is primarily for setting the invocation_id for separate
     # commands in the dbt servers. It shouldn't be necessary for the CLI.
     EVENT_MANAGER.invocation_id = str(uuid.uuid4())
+
+
+def ctx_set_event_manager(event_manager: IEventManager):
+    global EVENT_MANAGER
+    EVENT_MANAGER = event_manager
@@ -66,6 +66,27 @@ message ReferenceKeyMsg {
   string identifier = 3;
 }

+//ColumnType
+message ColumnType {
+  string column_name = 1;
+  string previous_column_type = 2;
+  string current_column_type = 3;
+}
+
+// ColumnConstraint
+message ColumnConstraint {
+  string column_name = 1;
+  string constraint_name = 2;
+  string constraint_type = 3;
+}
+
+// ModelConstraint
+message ModelConstraint {
+  string constraint_name = 1;
+  string constraint_type = 2;
+  repeated string columns = 3;
+}
+
 // GenericMessage, used for deserializing only
 message GenericMessage {
   EventInfo info = 1;
@@ -1248,6 +1269,24 @@ message SemanticValidationFailureMsg {
   SemanticValidationFailure data = 2;
 }

+// I071
+message UnversionedBreakingChange {
+  repeated string breaking_changes = 1;
+  string model_name = 2;
+  string model_file_path = 3;
+  bool contract_enforced_disabled = 4;
+  repeated string columns_removed = 5;
+  repeated ColumnType column_type_changes = 6;
+  repeated ColumnConstraint enforced_column_constraint_removed = 7;
+  repeated ModelConstraint enforced_model_constraint_removed = 8;
+  repeated string materialization_changed = 9;
+}
+
+message UnversionedBreakingChangeMsg {
+  EventInfo info = 1;
+  UnversionedBreakingChange data = 2;
+}
+

 // M - Deps generation

@@ -2245,25 +2284,7 @@ message CheckNodeTestFailureMsg {
   CheckNodeTestFailure data = 2;
 }

-// Z028
-message FirstRunResultError {
-  string msg = 1;
-}
-
-message FirstRunResultErrorMsg {
-  EventInfo info = 1;
-  FirstRunResultError data = 2;
-}
-
-// Z029
-message AfterFirstRunResultError {
-  string msg = 1;
-}
-
-message AfterFirstRunResultErrorMsg {
-  EventInfo info = 1;
-  AfterFirstRunResultError data = 2;
-}
+// Skipped Z028, Z029

 // Z030
 message EndOfRunSummary {
@@ -1233,6 +1233,20 @@ class SemanticValidationFailure(WarnLevel):
         return self.msg


+class UnversionedBreakingChange(WarnLevel):
+    def code(self):
+        return "I071"
+
+    def message(self) -> str:
+        reasons = "\n - ".join(self.breaking_changes)
+
+        return (
+            f"Breaking change to contracted, unversioned model {self.model_name} ({self.model_file_path})"
+            "\nWhile comparing to previous project state, dbt detected a breaking change to an unversioned model."
+            f"\n - {reasons}\n"
+        )
+
+
 # =======================================================
 # M - Deps generation
 # =======================================================
@@ -2171,25 +2185,7 @@ class CheckNodeTestFailure(InfoLevel):
         return f" See test failures:\n {border}\n {msg}\n {border}"


-# FirstRunResultError and AfterFirstRunResultError are just splitting the message from the result
-# object into multiple log lines
-# TODO: is this reallly needed?  See printer.py
-
-
-class FirstRunResultError(ErrorLevel):
-    def code(self):
-        return "Z028"
-
-    def message(self) -> str:
-        return yellow(self.msg)
-
-
-class AfterFirstRunResultError(ErrorLevel):
-    def code(self):
-        return "Z029"
-
-    def message(self) -> str:
-        return self.msg
+# Skipped Z028, Z029


 class EndOfRunSummary(InfoLevel):
File diff suppressed because one or more lines are too long
@@ -3,7 +3,7 @@ import json
 import re
 import io
 import agate
-from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
+from typing import Any, Dict, List, Mapping, Optional, Union

 from dbt.dataclass_schema import ValidationError
 from dbt.events.helpers import env_secrets, scrub_secrets
@@ -213,67 +213,22 @@ class ContractBreakingChangeError(DbtRuntimeError):

     def __init__(
         self,
-        contract_enforced_disabled: bool,
-        columns_removed: List[str],
-        column_type_changes: List[Tuple[str, str, str]],
-        enforced_column_constraint_removed: List[Tuple[str, str]],
-        enforced_model_constraint_removed: List[Tuple[str, List[str]]],
-        materialization_changed: List[str],
+        breaking_changes: List[str],
         node=None,
     ):
-        self.contract_enforced_disabled = contract_enforced_disabled
-        self.columns_removed = columns_removed
-        self.column_type_changes = column_type_changes
-        self.enforced_column_constraint_removed = enforced_column_constraint_removed
-        self.enforced_model_constraint_removed = enforced_model_constraint_removed
-        self.materialization_changed = materialization_changed
+        self.breaking_changes = breaking_changes
         super().__init__(self.message(), node)

     @property
     def type(self):
-        return "Breaking Change to Contract"
+        return "Breaking change to contract"

     def message(self):
-        breaking_changes = []
-        if self.contract_enforced_disabled:
-            breaking_changes.append("The contract's enforcement has been disabled.")
-        if self.columns_removed:
-            columns_removed_str = "\n - ".join(self.columns_removed)
-            breaking_changes.append(f"Columns were removed: \n - {columns_removed_str}")
-        if self.column_type_changes:
-            column_type_changes_str = "\n - ".join(
-                [f"{c[0]} ({c[1]} -> {c[2]})" for c in self.column_type_changes]
-            )
-            breaking_changes.append(
-                f"Columns with data_type changes: \n - {column_type_changes_str}"
-            )
-        if self.enforced_column_constraint_removed:
-            column_constraint_changes_str = "\n - ".join(
-                [f"{c[0]} ({c[1]})" for c in self.enforced_column_constraint_removed]
-            )
-            breaking_changes.append(
-                f"Enforced column level constraints were removed: \n - {column_constraint_changes_str}"
-            )
-        if self.enforced_model_constraint_removed:
-            model_constraint_changes_str = "\n - ".join(
-                [f"{c[0]} -> {c[1]}" for c in self.enforced_model_constraint_removed]
-            )
-            breaking_changes.append(
-                f"Enforced model level constraints were removed: \n - {model_constraint_changes_str}"
-            )
-        if self.materialization_changed:
-            materialization_changes_str = "\n - ".join(
-                f"{self.materialization_changed[0]} -> {self.materialization_changed[1]}"
-            )
-            breaking_changes.append(
-                f"Materialization changed with enforced constraints: \n - {materialization_changes_str}"
-            )
-
-        reasons = "\n\n".join(breaking_changes)
+        reasons = "\n - ".join(self.breaking_changes)

         return (
             "While comparing to previous project state, dbt detected a breaking change to an enforced contract."
-            f"\n\n{reasons}\n\n"
+            f"\n - {reasons}\n"
             "Consider making an additive (non-breaking) change instead, if possible.\n"
             "Otherwise, create a new model version: https://docs.getdbt.com/docs/collaborate/govern/model-versions"
         )
@@ -486,7 +441,7 @@ class InvalidConnectionError(DbtRuntimeError):
         self.thread_id = thread_id
         self.known = known
         super().__init__(
-            msg="connection never acquired for thread {self.thread_id}, have {self.known}"
+            msg=f"connection never acquired for thread {self.thread_id}, have {self.known}"
         )
@@ -103,7 +103,7 @@ SelectorTarget = Union[SourceDefinition, ManifestNode, Exposure, Metric]
 class SelectorMethod(metaclass=abc.ABCMeta):
     def __init__(
         self, manifest: Manifest, previous_state: Optional[PreviousState], arguments: List[str]
-    ):
+    ) -> None:
         self.manifest: Manifest = manifest
         self.previous_state = previous_state
         self.arguments: List[str] = arguments
@@ -467,7 +467,7 @@ class TestTypeSelectorMethod(SelectorMethod):


 class StateSelectorMethod(SelectorMethod):
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args, **kwargs) -> None:
         super().__init__(*args, **kwargs)
         self.modified_macros: Optional[List[str]] = None
@@ -1,44 +0,0 @@
-{% macro drop_relation(relation) -%}
-  {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}
-{% endmacro %}
-
-{% macro default__drop_relation(relation) -%}
-  {% call statement('drop_relation', auto_begin=False) -%}
-    {%- if relation.is_table -%}
-      {{- drop_table(relation) -}}
-    {%- elif relation.is_view -%}
-      {{- drop_view(relation) -}}
-    {%- elif relation.is_materialized_view -%}
-      {{- drop_materialized_view(relation) -}}
-    {%- else -%}
-      drop {{ relation.type }} if exists {{ relation }} cascade
-    {%- endif -%}
-  {%- endcall %}
-{% endmacro %}
-
-
-{% macro drop_table(relation) -%}
-  {{ return(adapter.dispatch('drop_table', 'dbt')(relation)) }}
-{%- endmacro %}
-
-{% macro default__drop_table(relation) -%}
-    drop table if exists {{ relation }} cascade
-{%- endmacro %}
-
-
-{% macro drop_view(relation) -%}
-  {{ return(adapter.dispatch('drop_view', 'dbt')(relation)) }}
-{%- endmacro %}
-
-{% macro default__drop_view(relation) -%}
-    drop view if exists {{ relation }} cascade
-{%- endmacro %}
-
-
-{% macro drop_materialized_view(relation) -%}
-  {{ return(adapter.dispatch('drop_materialized_view', 'dbt')(relation)) }}
-{%- endmacro %}
-
-{% macro default__drop_materialized_view(relation) -%}
-    drop materialized view if exists {{ relation }} cascade
-{%- endmacro %}
@@ -63,3 +63,12 @@
   {{ exceptions.raise_not_implemented(
     'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}
 {% endmacro %}
+
+{% macro get_relations() %}
+  {{ return(adapter.dispatch('get_relations', 'dbt')()) }}
+{% endmacro %}
+
+{% macro default__get_relations() %}
+  {{ exceptions.raise_not_implemented(
+    'get_relations macro not implemented for adapter '+adapter.type()) }}
+{% endmacro %}
@@ -43,18 +43,6 @@
 {% endmacro %}


-{% macro rename_relation(from_relation, to_relation) -%}
-  {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}
-{% endmacro %}
-
-{% macro default__rename_relation(from_relation, to_relation) -%}
-  {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}
-  {% call statement('rename_relation') -%}
-    alter table {{ from_relation }} rename to {{ target_name }}
-  {%- endcall %}
-{% endmacro %}
-
-
 {% macro get_or_create_relation(database, schema, identifier, type) -%}
   {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}
 {% endmacro %}
@@ -1,3 +1,8 @@
+{# /*
+    This only exists for backwards compatibility for 1.6.0. In later versions, the general `get_replace_sql`
+    macro is called as replace is inherently not limited to a single relation (it takes in two relations).
+*/ #}
+
 {% macro get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %}
     {{- log('Applying REPLACE to: ' ~ relation) -}}
     {{- adapter.dispatch('get_replace_materialized_view_as_sql', 'dbt')(relation, sql, existing_relation, backup_relation, intermediate_relation) -}}
@@ -5,5 +5,7 @@


 {% macro default__get_create_materialized_view_as_sql(relation, sql) -%}
-    {{ exceptions.raise_compiler_error("Materialized views have not been implemented for this adapter.") }}
+    {{ exceptions.raise_compiler_error(
+        "`get_create_materialized_view_as_sql` has not been implemented for this adapter."
+    ) }}
 {% endmacro %}
@@ -0,0 +1,13 @@
+{# /*
+    This was already implemented. Instead of creating a new macro that aligns with the standard,
+    this was reused and the default was maintained. This gets called by `drop_relation`, which
+    actually executes the drop, and `get_drop_sql`, which returns the template.
+*/ #}
+
+{% macro drop_materialized_view(relation) -%}
+    {{ return(adapter.dispatch('drop_materialized_view', 'dbt')(relation)) }}
+{%- endmacro %}
+
+{% macro default__drop_materialized_view(relation) -%}
+    drop materialized view if exists {{ relation }} cascade
+{%- endmacro %}
@@ -5,5 +5,5 @@


 {% macro default__refresh_materialized_view(relation) %}
-    {{ exceptions.raise_compiler_error("Materialized views have not been implemented for this adapter.") }}
+    {{ exceptions.raise_compiler_error("`refresh_materialized_view` has not been implemented for this adapter.") }}
 {% endmacro %}
@@ -0,0 +1,13 @@
+{# /*
+    This was already implemented. Instead of creating a new macro that aligns with the standard,
+    this was reused and the default was maintained. This gets called by `drop_relation`, which
+    actually executes the drop, and `get_drop_sql`, which returns the template.
+*/ #}
+
+{% macro drop_table(relation) -%}
+    {{ return(adapter.dispatch('drop_table', 'dbt')(relation)) }}
+{%- endmacro %}
+
+{% macro default__drop_table(relation) -%}
+    drop table if exists {{ relation }} cascade
+{%- endmacro %}
@@ -0,0 +1,13 @@
+{# /*
+    This was already implemented. Instead of creating a new macro that aligns with the standard,
+    this was reused and the default was maintained. This gets called by `drop_relation`, which
+    actually executes the drop, and `get_drop_sql`, which returns the template.
+*/ #}
+
+{% macro drop_view(relation) -%}
+    {{ return(adapter.dispatch('drop_view', 'dbt')(relation)) }}
+{%- endmacro %}
+
+{% macro default__drop_view(relation) -%}
+    drop view if exists {{ relation }} cascade
+{%- endmacro %}
@@ -19,7 +19,7 @@
     {% set day_count = (end_date - start_date).days %}
     {% if day_count < 0 %}
         {% set msg -%}
-            Partiton start date is after the end date ({{ start_date }}, {{ end_date }})
+            Partition start date is after the end date ({{ start_date }}, {{ end_date }})
         {%- endset %}

         {{ exceptions.raise_compiler_error(msg, model) }}
@@ -33,7 +33,12 @@

     -- cleanup
     {% if existing_relation is not none %}
-        {{ adapter.rename_relation(existing_relation, backup_relation) }}
+        /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped
+           since the variable was first set. */
+        {% set existing_relation = load_cached_relation(existing_relation) %}
+        {% if existing_relation is not none %}
+            {{ adapter.rename_relation(existing_relation, backup_relation) }}
+        {% endif %}
     {% endif %}

     {{ adapter.rename_relation(intermediate_relation, target_relation) }}

@@ -45,7 +45,12 @@
     -- cleanup
     -- move the existing view out of the way
     {% if existing_relation is not none %}
-        {{ adapter.rename_relation(existing_relation, backup_relation) }}
+        /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped
+           since the variable was first set. */
+        {% set existing_relation = load_cached_relation(existing_relation) %}
+        {% if existing_relation is not none %}
+            {{ adapter.rename_relation(existing_relation, backup_relation) }}
+        {% endif %}
     {% endif %}
     {{ adapter.rename_relation(intermediate_relation, target_relation) }}
core/dbt/include/global_project/macros/relations/drop.sql (new file, 17 lines)
@@ -0,0 +1,17 @@
+{% macro drop_relation(relation) -%}
+  {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}
+{% endmacro %}
+
+{% macro default__drop_relation(relation) -%}
+  {% call statement('drop_relation', auto_begin=False) -%}
+    {%- if relation.is_table -%}
+      {{- drop_table(relation) -}}
+    {%- elif relation.is_view -%}
+      {{- drop_view(relation) -}}
+    {%- elif relation.is_materialized_view -%}
+      {{- drop_materialized_view(relation) -}}
+    {%- else -%}
+      drop {{ relation.type }} if exists {{ relation }} cascade
+    {%- endif -%}
+  {%- endcall %}
+{% endmacro %}
core/dbt/include/global_project/macros/relations/rename.sql (new file, 10 lines)
@@ -0,0 +1,10 @@
+{% macro rename_relation(from_relation, to_relation) -%}
+  {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}
+{% endmacro %}
+
+{% macro default__rename_relation(from_relation, to_relation) -%}
+  {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}
+  {% call statement('rename_relation') -%}
+    alter table {{ from_relation }} rename to {{ target_name }}
+  {%- endcall %}
+{% endmacro %}
Some files were not shown because too many files have changed in this diff.