Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-22 11:41:28 +00:00
Compare commits: enable-pos...v1.11.0rc3 (32 commits)
Commits in this range (SHA1):

- 4322ff07ec
- 4fc9e342f1
- 877f092f00
- a55663d8b9
- 6fe01dd05b
- c17532f497
- c54f43986c
- e20863c036
- fae47a6dce
- 58391c8506
- 53e9454c9b
- 07ad7aeeb5
- 7a792f5bc8
- 0fae291191
- 9deae6ab8b
- f30e2f65c9
- 6bb8100a02
- a318d9f3d7
- ce2cd04019
- b85794a0dc
- f619002fa6
- 4db02533d6
- 0723c5ecb3
- 829b8b2161
- 51c89d67b3
- e6a608cd6c
- 163674685f
- 87d2d87737
- ee6a69a7c3
- 0d59a3d5a4
- b52eb6f8e7
- 8666c83f26
.bumpversion.cfg (deleted file)
@@ -1,37 +0,0 @@
-[bumpversion]
-current_version = 1.11.0b4
-parse = (?P<major>[\d]+) # major version number
-    \.(?P<minor>[\d]+) # minor version number
-    \.(?P<patch>[\d]+) # patch version number
-    (?P<prerelease> # optional pre-release - ex: a1, b2, rc25
-    (?P<prekind>a|b|rc) # pre-release type
-    (?P<num>[\d]+) # pre-release version number
-    )?
-    ( # optional nightly release indicator
-    \.(?P<nightly>dev[0-9]+) # ex: .dev02142023
-    )? # expected matches: `1.15.0`, `1.5.0a11`, `1.5.0a1.dev123`, `1.5.0.dev123457`, expected failures: `1`, `1.5`, `1.5.2-a1`, `text1.5.0`
-serialize =
-    {major}.{minor}.{patch}{prekind}{num}.{nightly}
-    {major}.{minor}.{patch}.{nightly}
-    {major}.{minor}.{patch}{prekind}{num}
-    {major}.{minor}.{patch}
-commit = False
-tag = False
-
-[bumpversion:part:prekind]
-first_value = a
-optional_value = final
-values =
-    a
-    b
-    rc
-    final
-
-[bumpversion:part:num]
-first_value = 1
-
-[bumpversion:part:nightly]
-
-[bumpversion:file:core/pyproject.toml]
-search = version = "{current_version}"
-replace = version = "{new_version}"
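With `.bumpversion.cfg` gone, the version is managed through hatch instead, which is what the updated release workflows further down rely on. A minimal local sketch of that flow, under the assumption that hatch's version source points at the same `version = "..."` line the old config rewrote (the target version below is only an example):

```bash
# Read and bump the project version with hatch instead of bumpversion.
# Run from the repository root; the dbt-core package lives under core/.
cd core

hatch version            # print the current version, e.g. 1.11.0b4
hatch version 1.12.0a1   # rewrite the version wherever hatch's version source points

# Sanity-check the installed package (assumes the dev environment exists,
# e.g. after `hatch run setup`).
hatch run dbt --version
```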
.changes/1.11.0-rc1.md (new file, 28 lines)

## dbt-core 1.11.0-rc1 - November 18, 2025

### Features

- Allow for defining function arguments with default values ([#12044](https://github.com/dbt-labs/dbt-core/issues/12044))

### Fixes

- Fix parse error when build_after.count set to 0 ([#12136](https://github.com/dbt-labs/dbt-core/issues/12136))
- Stop compiling python udfs like python models ([#12153](https://github.com/dbt-labs/dbt-core/issues/12153))
- For metric names, fix bug allowing hyphens (not allowed in metricflow already), make validation throw ValidationErrors (not ParsingErrors), and add tests. ([#n/a](https://github.com/dbt-labs/dbt-core/issues/n/a))
- Include macros in unit test parsing ([#10157](https://github.com/dbt-labs/dbt-core/issues/10157))

### Under the Hood

- add dbt/jsonschemas to manifest.in ([#12126](https://github.com/dbt-labs/dbt-core/issues/12126))
- Move from setup.py to pyproject.toml ([#5696](https://github.com/dbt-labs/dbt-core/issues/5696))
- Fixes issue where config isn't propagated to metric from measure when set as create_metric:True ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
- Support DBT_ENGINE prefix for record-mode env vars ([#12149](https://github.com/dbt-labs/dbt-core/issues/12149))

### Dependencies

- Drop support for python 3.9 ([#12118](https://github.com/dbt-labs/dbt-core/issues/12118))

### Contributors
- [@WilliamDee](https://github.com/WilliamDee) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
- [@nathanskone](https://github.com/nathanskone) ([#10157](https://github.com/dbt-labs/dbt-core/issues/10157))
- [@theyostalservice](https://github.com/theyostalservice) ([#n/a](https://github.com/dbt-labs/dbt-core/issues/n/a))
.changes/1.11.0-rc2.md (new file, 29 lines)

## dbt-core 1.11.0-rc2 - December 01, 2025

### Features

- Support partial parsing for function nodes ([#12072](https://github.com/dbt-labs/dbt-core/issues/12072))

### Fixes

- Allow dbt deps to run when vars lack defaults in dbt_project.yml ([#8913](https://github.com/dbt-labs/dbt-core/issues/8913))
- Restore DuplicateResourceNameError for intra-project node name duplication, behind behavior flag `require_unique_project_resource_names` ([#12152](https://github.com/dbt-labs/dbt-core/issues/12152))
- Allow the usage of `function` with `--exclude-resource-type` flag ([#12143](https://github.com/dbt-labs/dbt-core/issues/12143))
- Fix bug where schemas of functions weren't guaranteed to exist ([#12142](https://github.com/dbt-labs/dbt-core/issues/12142))
- :bug: :snowman: Correctly reference foreign key references when --defer and --state provided ([#11885](https://github.com/dbt-labs/dbt-core/issues/11885))
- Fix generation of deprecations summary ([#12146](https://github.com/dbt-labs/dbt-core/issues/12146))
- :bug: :snowman: Add exception when using --state and referring to a removed test ([#10630](https://github.com/dbt-labs/dbt-core/issues/10630))
- :bug: :snowman: Stop emitting `NoNodesForSelectionCriteria` three times during `build` command ([#11627](https://github.com/dbt-labs/dbt-core/issues/11627))
- :bug: :snowman: Fix long Python stack traces appearing when package dependencies have incompatible version requirements ([#12049](https://github.com/dbt-labs/dbt-core/issues/12049))
- :bug: :snowman: Fixed issue where changing data type size/precision/scale (e.g., varchar(3) to varchar(10)) incorrectly triggered a breaking change error fo ([#11186](https://github.com/dbt-labs/dbt-core/issues/11186))
- :bug: :snowman: Support unit testing models that depend on sources with the same name ([#11975](https://github.com/dbt-labs/dbt-core/issues/11975), [#10433](https://github.com/dbt-labs/dbt-core/issues/10433))
- :bug: :snowman: Avoid retrying successful run-operation commands ([#11850](https://github.com/dbt-labs/dbt-core/issues/11850))
- :bug: :snowman: Fix `dbt deps --add-package` crash when packages.yml contains `warn-unpinned: false` ([#9104](https://github.com/dbt-labs/dbt-core/issues/9104))

### Under the Hood

- Update jsonschemas for schema.yml and dbt_project.yml deprecations ([#12180](https://github.com/dbt-labs/dbt-core/issues/12180))

### Contributors
- [@asiunov](https://github.com/asiunov) ([#12146](https://github.com/dbt-labs/dbt-core/issues/12146))
- [@michellark](https://github.com/michellark) ([#11885](https://github.com/dbt-labs/dbt-core/issues/11885))
.changes/1.11.0-rc3.md (new file, 21 lines)

## dbt-core 1.11.0-rc3 - December 08, 2025

### Features

- Raise jsonschema-based deprecation warnings by default ([#12240](https://github.com/dbt-labs/dbt-core/issues/12240))
- :bug: :snowman: Disable unit tests whose model is disabled ([#10540](https://github.com/dbt-labs/dbt-core/issues/10540))

### Fixes

- Fix bug in partial parsing when updating a model with a schema file that is referenced by a singular test ([#12223](https://github.com/dbt-labs/dbt-core/issues/12223))
- :bug: :snowman: Improve `dbt deps --add-package` duplicate detection with better cross-source matching and word boundaries ([#12239](https://github.com/dbt-labs/dbt-core/issues/12239))
- :bug: :snowman: Fix false positive deprecation warning of pre/post-hook SQL configs ([#12244](https://github.com/dbt-labs/dbt-core/issues/12244))

### Under the Hood

- Replace setuptools and tox with hatch for build, test, and environment management. ([#12151](https://github.com/dbt-labs/dbt-core/issues/12151))

### Contributors
- [@emmyoop](https://github.com/emmyoop) ([#12239](https://github.com/dbt-labs/dbt-core/issues/12239), [#12151](https://github.com/dbt-labs/dbt-core/issues/12151))
- [@mattogburke](https://github.com/mattogburke) ([#12223](https://github.com/dbt-labs/dbt-core/issues/12223))
- [@michelleark](https://github.com/michelleark) ([#12240](https://github.com/dbt-labs/dbt-core/issues/12240), [#10540](https://github.com/dbt-labs/dbt-core/issues/10540), [#12244](https://github.com/dbt-labs/dbt-core/issues/12244))
.changes/1.11.0/Features-20251006-140352.yaml (new file)
kind: Features
body: Support partial parsing for function nodes
time: 2025-10-06T14:03:52.258104-05:00
custom:
  Author: QMalcolm
  Issue: "12072"
.changes/1.11.0/Features-20251117-141053.yaml (new file)
kind: Features
body: Allow for defining function arguments with default values
time: 2025-11-17T14:10:53.860178-06:00
custom:
  Author: QMalcolm
  Issue: "12044"
.changes/1.11.0/Features-20251201-165209.yaml (new file)
kind: Features
body: Raise jsonschema-based deprecation warnings by default
time: 2025-12-01T16:52:09.354436-05:00
custom:
  Author: michelleark
  Issue: 12240

.changes/1.11.0/Features-20251203-122926.yaml (new file)
kind: Features
body: ':bug: :snowman: Disable unit tests whose model is disabled'
time: 2025-12-03T12:29:26.209248-05:00
custom:
  Author: michelleark
  Issue: "10540"

.changes/1.11.0/Fixes-20251117-140649.yaml (new file)
kind: Fixes
body: Include macros in unit test parsing
time: 2025-11-17T14:06:49.518566-05:00
custom:
  Author: michelleark nathanskone
  Issue: "10157"

.changes/1.11.0/Fixes-20251117-185025.yaml (new file)
kind: Fixes
body: Allow dbt deps to run when vars lack defaults in dbt_project.yml
time: 2025-11-17T18:50:25.759091+05:30
custom:
  Author: 3loka
  Issue: "8913"

.changes/1.11.0/Fixes-20251118-171106.yaml (new file)
kind: Fixes
body: Restore DuplicateResourceNameError for intra-project node name duplication, behind behavior flag `require_unique_project_resource_names`
time: 2025-11-18T17:11:06.454784-05:00
custom:
  Author: michelleark
  Issue: "12152"

.changes/1.11.0/Fixes-20251119-195034.yaml (new file)
kind: Fixes
body: Allow the usage of `function` with `--exclude-resource-type` flag
time: 2025-11-19T19:50:34.703236-06:00
custom:
  Author: QMalcolm
  Issue: "12143"

.changes/1.11.0/Fixes-20251124-155629.yaml (new file)
kind: Fixes
body: Fix bug where schemas of functions weren't guaranteed to exist
time: 2025-11-24T15:56:29.467004-06:00
custom:
  Author: QMalcolm
  Issue: "12142"

.changes/1.11.0/Fixes-20251124-155756.yaml (new file)
kind: Fixes
body: Fix generation of deprecations summary
time: 2025-11-24T15:57:56.544123-08:00
custom:
  Author: asiunov
  Issue: "12146"

.changes/1.11.0/Fixes-20251124-170855.yaml (new file)
kind: Fixes
body: ':bug: :snowman: Correctly reference foreign key references when --defer and --state provided'
time: 2025-11-24T17:08:55.387946-05:00
custom:
  Author: michellark
  Issue: "11885"

.changes/1.11.0/Fixes-20251125-120246.yaml (new file)
kind: Fixes
body: ':bug: :snowman: Add exception when using --state and referring to a removed test'
time: 2025-11-25T12:02:46.635026-05:00
custom:
  Author: emmyoop
  Issue: "10630"

.changes/1.11.0/Fixes-20251125-122020.yaml (new file)
kind: Fixes
body: ':bug: :snowman: Stop emitting `NoNodesForSelectionCriteria` three times during `build` command'
time: 2025-11-25T12:20:20.132379-06:00
custom:
  Author: QMalcolm
  Issue: "11627"

.changes/1.11.0/Fixes-20251127-141308.yaml (new file)
kind: Fixes
body: ":bug: :snowman: Fix long Python stack traces appearing when package dependencies have incompatible version requirements"
time: 2025-11-27T14:13:08.082542-05:00
custom:
  Author: emmyoop
  Issue: "12049"

.changes/1.11.0/Fixes-20251127-145929.yaml (new file)
kind: Fixes
body: ':bug: :snowman: Fixed issue where changing data type size/precision/scale (e.g., varchar(3) to varchar(10)) incorrectly triggered a breaking change error fo'
time: 2025-11-27T14:59:29.256274-05:00
custom:
  Author: emmyoop
  Issue: "11186"

.changes/1.11.0/Fixes-20251127-170124.yaml (new file)
kind: Fixes
body: ':bug: :snowman: Support unit testing models that depend on sources with the same name'
time: 2025-11-27T17:01:24.193516-05:00
custom:
  Author: michelleark
  Issue: 11975 10433

.changes/1.11.0/Fixes-20251128-102129.yaml (new file)
kind: Fixes
body: Fix bug in partial parsing when updating a model with a schema file that is referenced by a singular test
time: 2025-11-28T10:21:29.911147Z
custom:
  Author: mattogburke
  Issue: "12223"

.changes/1.11.0/Fixes-20251128-122838.yaml (new file)
kind: Fixes
body: ':bug: :snowman: Avoid retrying successful run-operation commands'
time: 2025-11-28T12:28:38.546261-05:00
custom:
  Author: michelleark
  Issue: "11850"

.changes/1.11.0/Fixes-20251128-161937.yaml (new file)
kind: Fixes
body: ':bug: :snowman: Fix `dbt deps --add-package` crash when packages.yml contains `warn-unpinned: false`'
time: 2025-11-28T16:19:37.608722-05:00
custom:
  Author: emmyoop
  Issue: "9104"

.changes/1.11.0/Fixes-20251128-163144.yaml (new file)
kind: Fixes
body: ':bug: :snowman: Improve `dbt deps --add-package` duplicate detection with better cross-source matching and word boundaries'
time: 2025-11-28T16:31:44.344099-05:00
custom:
  Author: emmyoop
  Issue: "12239"

.changes/1.11.0/Fixes-20251202-133705.yaml (new file)
kind: Fixes
body: ':bug: :snowman: Fix false positive deprecation warning of pre/post-hook SQL configs'
time: 2025-12-02T13:37:05.012112-05:00
custom:
  Author: michelleark
  Issue: "12244"

.changes/1.11.0/Under the Hood-20251119-110110.yaml (new file)
kind: Under the Hood
body: Update jsonschemas for schema.yml and dbt_project.yml deprecations
time: 2025-11-19T11:01:10.616676-05:00
custom:
  Author: michelleark
  Issue: "12180"

.changes/1.11.0/Under the Hood-20251121-140515.yaml (new file)
kind: Under the Hood
body: Replace setuptools and tox with hatch for build, test, and environment management.
time: 2025-11-21T14:05:15.838252-05:00
custom:
  Author: emmyoop
  Issue: "12151"
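Each YAML file above is a changie change entry; changie assembles them into the versioned markdown files and the CHANGELOG.md changes shown elsewhere in this comparison. A rough sketch of that flow, using the same Homebrew install the release workflow below performs; the `changie batch` argument is an assumption, only `changie merge` appears verbatim in this diff:

```bash
# Install changie the way the cut-release-branch workflow does.
brew tap miniscruff/changie https://github.com/miniscruff/changie
brew install changie

# Collect unreleased .changes/unreleased/*.yaml entries into a versioned
# markdown file such as .changes/1.11.0-rc3.md (version is illustrative).
changie batch 1.11.0-rc3

# Regenerate CHANGELOG.md from the per-version markdown files.
changie merge
```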
.changie.yaml (modified)
@@ -41,32 +41,26 @@ newlines:
  endOfVersion: 1

custom:
- key: Author
  label: GitHub Username(s) (separated by a single space if multiple)
  type: string
  minLength: 3
- key: Issue
  label: GitHub Issue Number (separated by a single space if multiple)
  type: string
  minLength: 1

footerFormat: |
  {{- $contributorDict := dict }}
-  {{- /* ensure all names in this list are all lowercase for later matching purposes */}}
-  {{- $core_team := splitList " " .Env.CORE_TEAM }}
-  {{- /* ensure we always skip snyk and dependabot in addition to the core team */}}
-  {{- $maintainers := list "dependabot[bot]" "snyk-bot"}}
-  {{- range $team_member := $core_team }}
-  {{- $team_member_lower := lower $team_member }}
-  {{- $maintainers = append $maintainers $team_member_lower }}
-  {{- end }}
+  {{- /* ensure we always skip snyk and dependabot */}}
+  {{- $bots := list "dependabot[bot]" "snyk-bot"}}
  {{- range $change := .Changes }}
  {{- $authorList := splitList " " $change.Custom.Author }}
  {{- /* loop through all authors for a single changelog */}}
  {{- range $author := $authorList }}
  {{- $authorLower := lower $author }}
-  {{- /* we only want to include non-core team contributors */}}
-  {{- if not (has $authorLower $maintainers)}}
+  {{- /* we only want to include non-bot contributors */}}
+  {{- if not (has $authorLower $bots)}}
  {{- $changeList := splitList " " $change.Custom.Author }}
  {{- $IssueList := list }}
  {{- $changeLink := $change.Kind }}
.github/workflows/cut-release-branch.yml (388 lines changed)
@@ -1,25 +1,44 @@
# **what?**
-# Cuts a new `*.latest` branch
-# Also cleans up all files in `.changes/unreleased` and `.changes/previous verion on
-# `main` and bumps `main` to the input version.
+# Cuts the `*.latest` branch, bumps dependencies on it, cleans up all files in `.changes/unreleased`
+# and `.changes/previous verion on main and bumps main to the input version.

# **why?**
-# Generally reduces the workload of engineers and reduces error. Allow automation.
+# Clean up the main branch after a release branch is cut and automate cutting the release branch.
+# Generally reduces the workload of engineers and reducing error.

# **when?**
-# This will run when called manually.
+# This will run when called manually or when triggered in another workflow.
+
+# Example Usage including required permissions: TODO: update once finalized
+
+# permissions:
+#   contents: read
+#   pull-requests: write
+#
+# name: Cut Release Branch
+# jobs:
+#   changelog:
+#     uses: dbt-labs/actions/.github/workflows/cut-release-branch.yml@main
+#     with:
+#       new_branch_name: 1.7.latest
+#       PR_title: "Cleanup main after cutting new 1.7.latest branch"
+#       PR_body: "All adapter PRs will fail CI until the dbt-core PR has been merged due to release version conflicts."
+#     secrets:
+#       FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }}
+
+# TODOs
+# add note to eventually commit changes directly and bypass checks - same as release - when we move to this model run test action after merge

name: Cut new release branch
+run-name: "Cutting New Branch: ${{ inputs.new_branch_name }}"

on:
  workflow_dispatch:
    inputs:
-      version_to_bump_main:
-        description: 'The alpha version main should bump to (ex. 1.6.0a1)'
-        required: true
      new_branch_name:
-        description: 'The full name of the new branch (ex. 1.5.latest)'
+        description: "The full name of the new branch (ex. 1.5.latest)"
        required: true
+        type: string

defaults:
  run:
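Because `version_to_bump_main` has been removed, the workflow now takes only `new_branch_name`. A hedged sketch of dispatching it from the command line with the GitHub CLI (the branch name below is illustrative, and this assumes a `gh` login with permission to run workflows on the repository):

```bash
# Manually dispatch the branch-cut workflow with its single remaining input.
gh workflow run "Cut new release branch" \
  --repo dbt-labs/dbt-core \
  -f new_branch_name=1.11.latest
```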
@@ -27,15 +46,346 @@ defaults:

permissions:
  contents: write
+  pull-requests: write
+
+env:
+  PYTHON_TARGET_VERSION: "3.10"
+  PR_TITLE: "Cleanup main after cutting new ${{ inputs.new_branch_name }} branch"
+  PR_BODY: "All adapter PRs will fail CI until the dbt-core PR has been merged due to release version conflicts."

jobs:
-  cut_branch:
-    name: "Cut branch and clean up main for dbt-core"
-    uses: dbt-labs/actions/.github/workflows/cut-release-branch.yml@main
-    with:
-      version_to_bump_main: ${{ inputs.version_to_bump_main }}
-      new_branch_name: ${{ inputs.new_branch_name }}
-      PR_title: "Cleanup main after cutting new ${{ inputs.new_branch_name }} branch"
-      PR_body: "All adapter PRs will fail CI until the dbt-core PR has been merged due to release version conflicts."
-    secrets:
-      FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }}
+  prep_work:
+    name: "Prep Work"
+    runs-on: ubuntu-latest
+    steps:
+      - name: "[DEBUG] Print Inputs"
+        run: |
+          echo "new_branch_name: ${{ inputs.new_branch_name }}"
+          echo "PR_title: ${{ env.PR_TITLE }}"
+          echo "PR_body: ${{ env.PR_BODY }}"
+
+  create_temp_branch:
+    name: "Create Temp branch off main"
+    runs-on: ubuntu-latest
+    outputs:
+      temp_branch_name: ${{ steps.variables.outputs.BRANCH_NAME }}
+
+    steps:
+      - name: "Set Branch Value"
+        id: variables
+        run: |
+          echo "BRANCH_NAME=cutting_release_branch/main_cleanup_$GITHUB_RUN_ID" >> $GITHUB_OUTPUT
+
+      - name: "Checkout ${{ github.repository }}"
+        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
+        with:
+          ref: "main"
+          token: ${{ secrets.FISHTOWN_BOT_PAT }}
+
+      - name: "Create PR Branch"
+        run: |
+          user="Github Build Bot"
+          email="buildbot@fishtownanalytics.com"
+          git config user.name "$user"
+          git config user.email "$email"
+          git checkout -b ${{ steps.variables.outputs.BRANCH_NAME }}
+          git push --set-upstream origin ${{ steps.variables.outputs.BRANCH_NAME }}
+
+      - name: "[Notification] Temp branch created"
+        run: |
+          message="Temp branch ${{ steps.variables.outputs.BRANCH_NAME }} created"
+          echo "::notice title="Temporary branch created": $title::$message"
+
+  cleanup_changelog:
+    name: "Clean Up Changelog"
+    needs: ["create_temp_branch"]
+    runs-on: ubuntu-latest
+    outputs:
+      next-version: ${{ steps.semver-current.outputs.next-minor-alpha-version }}
+
+    steps:
+      - name: "Checkout ${{ github.repository }}"
+        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
+        with:
+          ref: ${{ needs.create_temp_branch.outputs.temp_branch_name }}
+          token: ${{ secrets.FISHTOWN_BOT_PAT }}
+
+      - name: "Add Homebrew To PATH"
+        run: |
+          echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
+
+      - name: "Install Homebrew Packages"
+        run: |
+          brew install pre-commit
+          brew tap miniscruff/changie https://github.com/miniscruff/changie
+          brew install changie
+
+      - name: "Check Current Version In Code"
+        id: determine_version
+        run: |
+          current_version=$(grep '^version = ' core/pyproject.toml | sed 's/version = "\(.*\)"/\1/')
+          echo "current_version=$current_version" >> $GITHUB_OUTPUT
+
+      - name: "[Notification] Check Current Version In Code"
+        run: |
+          message="The current version is ${{ steps.determine_version.outputs.current_version }}"
+          echo "::notice title="Version Bump Check": $title::$message"
+
+      - name: "Parse Current Version Into Parts for Changelog Directories"
+        id: semver-current
+        uses: dbt-labs/actions/parse-semver@main
+        with:
+          version: ${{ steps.determine_version.outputs.current_version }}
+
+      - name: "[Notification] Next Alpha Version"
+        run: |
+          message="The next alpha version is ${{ steps.semver-current.outputs.next-minor-alpha-version }}"
+          echo "::notice title="Version Bump Check": $title::$message"
+
+      - name: "Delete Unreleased Changelog YAMLs"
+        # removal fails if no files exist. OK to continue since we're just cleaning up the files.
+        continue-on-error: true
+        run: |
+          rm .changes/unreleased/*.yaml || true
+
+      - name: "Delete Pre Release Changelogs and YAMLs"
+        # removal fails if no files exist. OK to continue since we're just cleaning up the files.
+        continue-on-error: true
+        run: |
+          rm .changes/${{ steps.semver-current.outputs.base-version }}/*.yaml || true
+          rm .changes/${{ steps.semver-current.outputs.major }}.${{ steps.semver-current.outputs.minor }}.*.md || true
+
+      - name: "Cleanup CHANGELOG.md"
+        run: |
+          changie merge
+
+      - name: "Commit Changelog Cleanup to Branch"
+        run: |
+          user="Github Build Bot"
+          email="buildbot@fishtownanalytics.com"
+          git config user.name "$user"
+          git config user.email "$email"
+          git status
+          git add .
+          git commit -m "Clean up changelog on main"
+          git push
+
+      - name: "[Notification] Changelog cleaned up"
+        run: |
+          message="Changelog on ${{ needs.create_temp_branch.outputs.temp_branch_name }} cleaned up"
+          echo "::notice title="Changelog cleaned up": $title::$message"
+
+  bump_version:
+    name: "Bump to next minor version"
+    needs: ["cleanup_changelog", "create_temp_branch"]
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: "Checkout ${{ github.repository }}"
+        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
+        with:
+          ref: ${{ needs.create_temp_branch.outputs.temp_branch_name }}
+          token: ${{ secrets.FISHTOWN_BOT_PAT }}
+
+      - name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}"
+        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # actions/setup-python@v5
+        with:
+          python-version: "${{ env.PYTHON_TARGET_VERSION }}"
+
+      - name: "Install Spark Dependencies"
+        if: ${{ contains(github.repository, 'dbt-labs/dbt-spark') }}
+        run: |
+          sudo apt-get update
+          sudo apt-get install libsasl2-dev
+
+      - name: "Install Python Dependencies"
+        run: |
+          python -m pip install --upgrade pip
+          python -m pip install hatch
+
+      - name: "Bump Version To ${{ needs.cleanup_changelog.outputs.next-version }}"
+        run: |
+          cd core
+          hatch version ${{ needs.cleanup_changelog.outputs.next-version }}
+          hatch run dev-req
+          dbt --version
+
+      - name: "Commit Version Bump to Branch"
+        run: |
+          user="Github Build Bot"
+          email="buildbot@fishtownanalytics.com"
+          git config user.name "$user"
+          git config user.email "$email"
+          git status
+          git add .
+          git commit -m "Bumping version to ${{ needs.cleanup_changelog.outputs.next-version }}"
+          git push
+
+      - name: "[Notification] Version Bump completed"
+        run: |
+          message="Version on ${{ needs.create_temp_branch.outputs.temp_branch_name }} bumped to ${{ needs.cleanup_changelog.outputs.next-version }}"
+          echo "::notice title="Version Bump Completed": $title::$message"
+
+  cleanup:
+    name: "Cleanup Code Quality"
+    needs: ["create_temp_branch", "bump_version"]
+    runs-on: ubuntu-latest
+    steps:
+      - name: "Checkout ${{ github.repository }}"
+        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
+        with:
+          ref: ${{ needs.create_temp_branch.outputs.temp_branch_name }}
+          token: ${{ secrets.FISHTOWN_BOT_PAT }}
+
+      - name: "Add Homebrew To PATH"
+        run: |
+          echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
+
+      - name: "brew install pre-commit"
+        run: |
+          brew install pre-commit
+
+      # this step will fail on whitespace errors but also correct them
+      - name: "Cleanup - Remove Trailing Whitespace Via Pre-commit"
+        continue-on-error: true
+        run: |
+          pre-commit run trailing-whitespace --files CHANGELOG.md .changes/* || true
+
+      # this step will fail on newline errors but also correct them
+      - name: "Cleanup - Remove Extra Newlines Via Pre-commit"
+        continue-on-error: true
+        run: |
+          pre-commit run end-of-file-fixer --files CHANGELOG.md .changes/* || true
+
+      - name: "Commit Version Bump to Branch"
+        run: |
+          user="Github Build Bot"
+          email="buildbot@fishtownanalytics.com"
+          git config user.name "$user"
+          git config user.email "$email"
+          git status
+          git add .
+          git commit -m "Code quality cleanup"
+          git push
+
+  open_pr:
+    name: "Open PR Against main"
+    needs: ["cleanup_changelog", "create_temp_branch", "cleanup"]
+    runs-on: ubuntu-latest
+    outputs:
+      pr_number: ${{ steps.create_pr.outputs.pull-request-number }}
+
+    steps:
+      - name: "Checkout ${{ github.repository }}"
+        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
+        with:
+          ref: ${{ needs.create_temp_branch.outputs.temp_branch_name }}
+          token: ${{ secrets.FISHTOWN_BOT_PAT }}
+
+      - name: "Determine PR Title"
+        id: pr_title
+        run: |
+          echo "pr_title=${{ env.PR_TITLE }}" >> $GITHUB_OUTPUT
+          if [${{ env.PR_TITLE }} == ""]; then
+            echo "pr_title='Clean up changelogs and bump to version ${{ needs.cleanup_changelog.outputs.next-version }}'" >> $GITHUB_OUTPUT
+          fi
+
+      - name: "Determine PR Body"
+        id: pr_body
+        run: |
+          echo "pr_body=${{ env.PR_BODY }}" >> $GITHUB_OUTPUT
+          if [${{ env.PR_BODY }} == ""]; then
+            echo "pr_body='Clean up changelogs and bump to version ${{ needs.cleanup_changelog.outputs.next-version }}'" >> $GITHUB_OUTPUT
+          fi
+
+      - name: "Add Branch Details"
+        id: pr_body_branch
+        run: |
+          branch_details="The workflow that generated this PR also created a new branch: ${{ inputs.new_branch_name }}"
+          full_body="${{ steps.pr_body.outputs.pr_body }} $branch_details"
+          echo "pr_full_body=$full_body" >> $GITHUB_OUTPUT
+
+      - name: "Open Pull Request"
+        id: create_pr
+        run: |
+          pr_url=$(gh pr create -B main -H ${{ needs.create_temp_branch.outputs.temp_branch_name }} -l "Skip Changelog" -t "${{ steps.pr_title.outputs.pr_title }}" -b "${{ steps.pr_body_branch.outputs.pr_full_body }}")
+          echo "pr_url=$pr_url" >> $GITHUB_OUTPUT
+        env:
+          GH_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
+
+      - name: "[Notification] Pull Request Opened"
+        run: |
+          message="PR opened at ${{ steps.create_pr.outputs.pr_url }}"
+          echo "::notice title="Pull Request Opened": $title::$message"
+
+  cut_new_branch:
+    # don't cut the new branch until we're done opening the PR against main
+    name: "Cut New Branch ${{ inputs.new_branch_name }}"
+    needs: [open_pr]
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: "Checkout ${{ github.repository }}"
+        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
+        with:
+          token: ${{ secrets.FISHTOWN_BOT_PAT }}
+          fetch-depth: 0
+
+      - name: "Ensure New Branch Does Not Exist"
+        id: check_new_branch
+        run: |
+          title="Check New Branch Existence"
+          if git show-ref --quiet ${{ inputs.new_branch_name }}; then
+            message="Branch ${{ inputs.new_branch_name }} already exists. Exiting."
+            echo "::error $title::$message"
+            exit 1
+          fi
+
+      - name: "Create New Release Branch"
+        run: |
+          git checkout -b ${{ inputs.new_branch_name }}
+
+      - name: "Push up New Branch"
+        run: |
+          #Data for commit
+          user="Github Build Bot"
+          email="buildbot@fishtownanalytics.com"
+          git config user.name "$user"
+          git config user.email "$email"
+          git push --set-upstream origin ${{ inputs.new_branch_name }}
+
+      - name: "[Notification] New branch created"
+        run: |
+          message="New branch ${{ inputs.new_branch_name }} created"
+          echo "::notice title="New branch created": $title::$message"
+
+      - name: "Bump dependencies via script"
+        # This bumps the dependency on dbt-core in the adapters
+        if: ${{ !contains(github.repository, 'dbt-core') }}
+        run: |
+          echo ${{ github.repository }}
+          echo "running update_dependencies script"
+          bash ${GITHUB_WORKSPACE}/.github/scripts/update_dependencies.sh ${{ inputs.new_branch_name }}
+          commit_message="bumping .latest branch variable in update_dependencies.sh to ${{ inputs.new_branch_name }}"
+          git status
+          git add .
+          git commit -m "$commit_message"
+          git push
+
+      - name: "Bump env variable via script"
+        # bumps the RELEASE_BRANCH variable in nightly-release.yml in adapters
+        if: ${{ !contains(github.repository, 'dbt-core') }}
+        run: |
+          file="./.github/scripts/update_release_branch.sh"
+          if test -f "$file"; then
+            echo ${{ github.repository }}
+            echo "running some script yet to be written now"
+            bash $file ${{ inputs.new_branch_name }}
+            commit_message="updating env variable to ${{ inputs.new_branch_name }} in nightly-release.yml"
+            git status
+            git add .
+            git commit -m "$commit_message"
+            git push
+          else
+            echo "no $file seen skipping step"
+          fi
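The cleanup_changelog job above depends on `dbt-labs/actions/parse-semver` for its `next-minor-alpha-version` output, which is not shown in this diff. A rough shell approximation of that single output, assuming the version scheme from the deleted `.bumpversion.cfg` (e.g. 1.11.0b4 becomes 1.12.0a1); this is not the action itself:

```bash
# Rough local approximation of the next-minor-alpha-version output
# consumed by the version bump job (assumed semantics).
current_version=$(grep '^version = ' core/pyproject.toml | sed 's/version = "\(.*\)"/\1/')
major=$(echo "$current_version" | cut -d. -f1)
minor=$(echo "$current_version" | cut -d. -f2)
next_minor_alpha="${major}.$((minor + 1)).0a1"
echo "next-minor-alpha-version=${next_minor_alpha}"
```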
.github/workflows/main.yml (88 lines changed)
@@ -54,22 +54,28 @@ jobs:
      - name: Set up Python
        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
        with:
-          python-version: '3.10'
+          python-version: "3.10"

      - name: Install python dependencies
        run: |
          python -m pip install --user --upgrade pip
          python -m pip --version
-          make dev
-          make dev_req
-          mypy --version
-          dbt --version
+          python -m pip install hatch
+          cd core
+          hatch run setup
+
+      - name: Verify dbt installation
+        run: |
+          cd core
+          hatch run dbt --version

      - name: Run pre-commit hooks
-        run: pre-commit run --all-files --show-diff-on-failure
+        run: |
+          cd core
+          hatch run code-quality

  unit:
-    name: unit test / python ${{ matrix.python-version }}
+    name: "unit test / python ${{ matrix.python-version }}"

    runs-on: ubuntu-latest
    timeout-minutes: 10
@@ -77,10 +83,7 @@ jobs:
    strategy:
      fail-fast: false
      matrix:
-        python-version: ["3.10", "3.11", "3.12", "3.13" ]
+        python-version: ["3.10", "3.11", "3.12", "3.13"]

-    env:
-      TOXENV: "unit"
-
    steps:
      - name: Check out the repository
@@ -95,15 +98,15 @@ jobs:
        run: |
          python -m pip install --user --upgrade pip
          python -m pip --version
-          python -m pip install tox
-          tox --version
+          python -m pip install hatch
+          hatch --version

      - name: Run unit tests
        uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
        with:
          timeout_minutes: 10
          max_attempts: 3
-          command: tox -e unit
+          command: cd core && hatch run ci:unit-tests

      - name: Get current date
        if: always()
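The hatch scripts these CI steps now call can also be exercised on a developer machine; a minimal sketch using only commands that appear in the hunks above (the script names are defined in core's hatch configuration):

```bash
# Reproduce the code-quality and unit-test CI jobs locally with hatch.
python -m pip install --user --upgrade pip hatch
cd core
hatch run setup           # replaces `make dev` / `make dev_req`
hatch run code-quality    # replaces `pre-commit run --all-files`
hatch run ci:unit-tests   # replaces `tox -e unit`
```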
@@ -118,6 +121,7 @@ jobs:
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: unit
+          fail_ci_if_error: false

  integration-metadata:
    name: integration test metadata generation
@@ -155,7 +159,7 @@ jobs:
          echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT

  integration-postgres:
-    name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
+    name: "(${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}"

    runs-on: ${{ matrix.os }}
    timeout-minutes: 30
@@ -164,11 +168,10 @@ jobs:
    strategy:
      fail-fast: false
      matrix:
-        python-version: [ "3.10", "3.11", "3.12", "3.13" ]
+        python-version: ["3.10", "3.11", "3.12", "3.13"]
        os: ["ubuntu-latest"]
        split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
    env:
-      TOXENV: integration
      DBT_INVOCATION_ENV: github-actions
      DBT_TEST_USER_1: dbt_test_user_1
      DBT_TEST_USER_2: dbt_test_user_2
@@ -218,17 +221,16 @@ jobs:
        run: |
          python -m pip install --user --upgrade pip
          python -m pip --version
-          python -m pip install tox
-          tox --version
+          python -m pip install hatch
+          hatch --version

      - name: Run integration tests
        uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
        with:
          timeout_minutes: 30
          max_attempts: 3
-          command: tox -- --ddtrace
-        env:
-          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
+          shell: bash
+          command: cd core && hatch run ci:integration-tests -- --ddtrace --splits ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }} --group ${{ matrix.split-group }}

      - name: Get current date
        if: always()
@@ -249,6 +251,7 @@ jobs:
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: integration
+          fail_ci_if_error: false

  integration-mac-windows:
    name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
@@ -263,7 +266,6 @@ jobs:
        # already includes split group and runs mac + windows
        include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
    env:
-      TOXENV: integration
      DBT_INVOCATION_ENV: github-actions
      DBT_TEST_USER_1: dbt_test_user_1
      DBT_TEST_USER_2: dbt_test_user_2
@@ -300,17 +302,16 @@ jobs:
        run: |
          python -m pip install --user --upgrade pip
          python -m pip --version
-          python -m pip install tox
-          tox --version
+          python -m pip install hatch
+          hatch --version

      - name: Run integration tests
        uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
        with:
          timeout_minutes: 30
          max_attempts: 3
-          command: tox -- --ddtrace
-        env:
-          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
+          shell: bash
+          command: cd core && hatch run ci:integration-tests -- --ddtrace --splits ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }} --group ${{ matrix.split-group }}

      - name: Get current date
        if: always()
@@ -331,6 +332,7 @@ jobs:
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: integration
+          fail_ci_if_error: false

  integration-report:
    if: ${{ always() }}
@@ -361,12 +363,12 @@ jobs:
      - name: Set up Python
        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
        with:
-          python-version: '3.10'
+          python-version: "3.10"

      - name: Install python dependencies
        run: |
          python -m pip install --user --upgrade pip
-          python -m pip install --upgrade setuptools wheel twine check-wheel-contents
+          python -m pip install --upgrade hatch twine check-wheel-contents
          python -m pip --version

      - name: Build distributions
@@ -375,27 +377,7 @@ jobs:
      - name: Show distributions
        run: ls -lh dist/

-      - name: Check distribution descriptions
+      - name: Check and verify distributions
        run: |
-          twine check dist/*
-
-      - name: Check wheel contents
-        run: |
-          check-wheel-contents dist/*.whl --ignore W007,W008
-
-      - name: Install wheel distributions
-        run: |
-          find ./dist/*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
-
-      - name: Check wheel distributions
-        run: |
-          dbt --version
-
-      - name: Install source distributions
-        # ignore dbt-1.0.0, which intentionally raises an error when installed from source
-        run: |
-          find ./dist/*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
-
-      - name: Check source distributions
-        run: |
-          dbt --version
+          cd core
+          hatch run build:check-all
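The separate twine, check-wheel-contents, and install-and-verify steps are collapsed into one hatch script. A hedged local equivalent of the updated packaging checks; `build:check-all` is taken from the hunk above, and the assumption is that it wraps the removed per-artifact checks:

```bash
# Build and verify dbt-core distributions the way the updated workflow does.
python -m pip install --upgrade hatch twine check-wheel-contents
cd core
hatch run build:check-all   # presumably covers the old twine / wheel-contents / install checks
```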
.github/workflows/nightly-release.yml (2 lines changed)
@@ -46,7 +46,7 @@ jobs:
      - name: "Get Current Version Number"
        id: version-number-sources
        run: |
-          current_version=`awk -F"current_version = " '{print $2}' .bumpversion.cfg | tr '\n' ' '`
+          current_version=$(grep '^version = ' core/dbt/__version__.py | sed 's/version = "\(.*\)"/\1/')
          echo "current_version=$current_version" >> $GITHUB_OUTPUT

      - name: "Audit Version And Parse Into Parts"
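The nightly workflow now reads the version from `core/dbt/__version__.py` instead of `.bumpversion.cfg`. Given a line like `version = "1.11.0rc3"` in that file (the value is illustrative), the new one-liner extracts just the version string:

```bash
# Example: pull the bare version string out of core/dbt/__version__.py.
# With `version = "1.11.0rc3"` in the file, this prints 1.11.0rc3.
current_version=$(grep '^version = ' core/dbt/__version__.py | sed 's/version = "\(.*\)"/\1/')
echo "$current_version"
```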
.github/workflows/schema-check.yml (22 lines changed)
@@ -22,7 +22,7 @@ on:
      target_branch:
        description: "The branch to check against"
        type: string
-        default: 'main'
+        default: "main"
        required: true

# no special access is needed
@@ -48,8 +48,8 @@ jobs:
      - name: Checkout dbt repo
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
        with:
          path: ${{ env.DBT_REPO_DIRECTORY }}
          ref: ${{ inputs.target_branch }}

      - name: Check for changes in core/dbt/artifacts
        # https://github.com/marketplace/actions/paths-changes-filter
@@ -72,18 +72,16 @@ jobs:
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
        with:
          repository: dbt-labs/schemas.getdbt.com
-          ref: 'main'
+          ref: "main"
          path: ${{ env.SCHEMA_REPO_DIRECTORY }}

      - name: Generate current schema
        if: steps.check_artifact_changes.outputs.artifacts_changed == 'true'
        run: |
-          cd ${{ env.DBT_REPO_DIRECTORY }}
-          python3 -m venv env
-          source env/bin/activate
-          pip install --upgrade pip
-          pip install -r dev-requirements.txt -r editable-requirements.txt
-          python scripts/collect-artifact-schema.py --path ${{ env.LATEST_SCHEMA_PATH }}
+          cd ${{ env.DBT_REPO_DIRECTORY }}/core
+          pip install --upgrade pip hatch
+          hatch run setup
+          hatch run json-schema -- --path ${{ env.LATEST_SCHEMA_PATH }}

      # Copy generated schema files into the schemas.getdbt.com repo
      # Do a git diff to find any changes
@@ -99,5 +97,5 @@ jobs:
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # actions/upload-artifact@v4
        if: ${{ failure() && steps.check_artifact_changes.outputs.artifacts_changed == 'true' }}
        with:
-          name: 'schema_changes.txt'
-          path: '${{ env.SCHEMA_DIFF_ARTIFACT }}'
+          name: "schema_changes.txt"
+          path: "${{ env.SCHEMA_DIFF_ARTIFACT }}"
@@ -102,8 +102,8 @@ jobs:
        run: |
          pip install --user --upgrade pip
          pip --version
-          pip install tox
-          tox --version
+          pip install hatch
+          hatch --version

      - name: Run postgres setup script
        run: |
@@ -123,7 +123,7 @@ jobs:
        with:
          timeout_minutes: 30
          max_attempts: 3
-          command: tox -e integration -- -nauto
+          command: cd core && hatch run ci:integration-tests -- -nauto
        env:
          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
.github/workflows/test-repeater.yml (41 lines changed)
@@ -14,33 +14,33 @@ on:
  workflow_dispatch:
    inputs:
      branch:
-        description: 'Branch to check out'
+        description: "Branch to check out"
        type: string
        required: true
-        default: 'main'
+        default: "main"
      test_path:
-        description: 'Path to single test to run (ex: tests/functional/retry/test_retry.py::TestRetry::test_fail_fast)'
+        description: "Path to single test to run (ex: tests/functional/retry/test_retry.py::TestRetry::test_fail_fast)"
        type: string
        required: true
-        default: 'tests/functional/...'
+        default: "tests/functional/..."
      python_version:
-        description: 'Version of Python to Test Against'
+        description: "Version of Python to Test Against"
        type: choice
        options:
-          - '3.10'
-          - '3.11'
+          - "3.10"
+          - "3.11"
      os:
-        description: 'OS to run test in'
+        description: "OS to run test in"
        type: choice
        options:
-          - 'ubuntu-latest'
-          - 'macos-14'
-          - 'windows-latest'
+          - "ubuntu-latest"
+          - "macos-14"
+          - "windows-latest"
      num_runs_per_batch:
-        description: 'Max number of times to run the test per batch. We always run 10 batches.'
+        description: "Max number of times to run the test per batch. We always run 10 batches."
        type: number
        required: true
-        default: '50'
+        default: "50"

permissions: read-all

@@ -90,12 +90,19 @@ jobs:
        with:
          python-version: "${{ inputs.python_version }}"

+      - name: "Install hatch"
+        run: python -m pip install --user --upgrade pip hatch
+
      - name: "Setup Dev Environment"
-        run: make dev
+        run: |
+          cd core
+          hatch run setup

      - name: "Set up postgres (linux)"
        if: inputs.os == '${{ vars.UBUNTU_LATEST }}'
-        run: make setup-db
+        run: |
+          cd core
+          hatch run setup-db

      # mac and windows don't use make due to limitations with docker with those runners in GitHub
      - name: Set up postgres (macos)
@@ -153,5 +160,5 @@ jobs:
      - name: "Error for Failures"
        if: ${{ steps.pytest.outputs.failure }}
        run: |
          echo "Batch ${{ matrix.batch }} failed ${{ steps.pytest.outputs.failure }} of ${{ inputs.num_runs_per_batch }} tests"
          exit 1
.gitignore  (vendored, 2 changed lines)

@@ -15,6 +15,7 @@ build/
 !core/dbt/docs/build
 develop-eggs/
 dist/
+dist-*/
 downloads/
 eggs/
 .eggs/

@@ -95,6 +96,7 @@ target/
 # pycharm
 .idea/
 venv/
+.venv*/

 # AWS credentials
 .aws/

CHANGELOG.md  (83 changed lines)

@@ -5,6 +5,88 @@
 - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
 - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)

+## dbt-core 1.11.0-rc3 - December 08, 2025
+
+### Features
+
+- Raise jsonschema-based deprecation warnings by default ([#12240](https://github.com/dbt-labs/dbt-core/issues/12240))
+- :bug: :snowman: Disable unit tests whose model is disabled ([#10540](https://github.com/dbt-labs/dbt-core/issues/10540))
+
+### Fixes
+
+- Fix bug in partial parsing when updating a model with a schema file that is referenced by a singular test ([#12223](https://github.com/dbt-labs/dbt-core/issues/12223))
+- :bug: :snowman: Improve `dbt deps --add-package` duplicate detection with better cross-source matching and word boundaries ([#12239](https://github.com/dbt-labs/dbt-core/issues/12239))
+- :bug: :snowman: Fix false positive deprecation warning of pre/post-hook SQL configs ([#12244](https://github.com/dbt-labs/dbt-core/issues/12244))
+
+### Under the Hood
+
+- Replace setuptools and tox with hatch for build, test, and environment management. ([#12151](https://github.com/dbt-labs/dbt-core/issues/12151))
+
+### Contributors
+- [@emmyoop](https://github.com/emmyoop) ([#12239](https://github.com/dbt-labs/dbt-core/issues/12239), [#12151](https://github.com/dbt-labs/dbt-core/issues/12151))
+- [@mattogburke](https://github.com/mattogburke) ([#12223](https://github.com/dbt-labs/dbt-core/issues/12223))
+- [@michelleark](https://github.com/michelleark) ([#12240](https://github.com/dbt-labs/dbt-core/issues/12240), [#10540](https://github.com/dbt-labs/dbt-core/issues/10540), [#12244](https://github.com/dbt-labs/dbt-core/issues/12244))
+
+
+## dbt-core 1.11.0-rc2 - December 01, 2025
+
+### Features
+
+- Support partial parsing for function nodes ([#12072](https://github.com/dbt-labs/dbt-core/issues/12072))
+
+### Fixes
+
+- Allow dbt deps to run when vars lack defaults in dbt_project.yml ([#8913](https://github.com/dbt-labs/dbt-core/issues/8913))
+- Restore DuplicateResourceNameError for intra-project node name duplication, behind behavior flag `require_unique_project_resource_names` ([#12152](https://github.com/dbt-labs/dbt-core/issues/12152))
+- Allow the usage of `function` with `--exclude-resource-type` flag ([#12143](https://github.com/dbt-labs/dbt-core/issues/12143))
+- Fix bug where schemas of functions weren't guaranteed to exist ([#12142](https://github.com/dbt-labs/dbt-core/issues/12142))
+- :bug: :snowman: Correctly reference foreign key references when --defer and --state provided ([#11885](https://github.com/dbt-labs/dbt-core/issues/11885))
+- Fix generation of deprecations summary ([#12146](https://github.com/dbt-labs/dbt-core/issues/12146))
+- :bug: :snowman: Add exception when using --state and referring to a removed test ([#10630](https://github.com/dbt-labs/dbt-core/issues/10630))
+- :bug: :snowman: Stop emitting `NoNodesForSelectionCriteria` three times during `build` command ([#11627](https://github.com/dbt-labs/dbt-core/issues/11627))
+- :bug: :snowman: Fix long Python stack traces appearing when package dependencies have incompatible version requirements ([#12049](https://github.com/dbt-labs/dbt-core/issues/12049))
+- :bug: :snowman: Fixed issue where changing data type size/precision/scale (e.g., varchar(3) to varchar(10)) incorrectly triggered a breaking change error fo ([#11186](https://github.com/dbt-labs/dbt-core/issues/11186))
+- :bug: :snowman: Support unit testing models that depend on sources with the same name ([#11975](https://github.com/dbt-labs/dbt-core/issues/11975), [#10433](https://github.com/dbt-labs/dbt-core/issues/10433))
+- :bug: :snowman: Avoid retrying successful run-operation commands ([#11850](https://github.com/dbt-labs/dbt-core/issues/11850))
+- :bug: :snowman: Fix `dbt deps --add-package` crash when packages.yml contains `warn-unpinned: false` ([#9104](https://github.com/dbt-labs/dbt-core/issues/9104))
+
+### Under the Hood
+
+- Update jsonschemas for schema.yml and dbt_project.yml deprecations ([#12180](https://github.com/dbt-labs/dbt-core/issues/12180))
+
+### Contributors
+- [@asiunov](https://github.com/asiunov) ([#12146](https://github.com/dbt-labs/dbt-core/issues/12146))
+- [@michellark](https://github.com/michellark) ([#11885](https://github.com/dbt-labs/dbt-core/issues/11885))
+
+## dbt-core 1.11.0-rc1 - November 18, 2025
+
+### Features
+
+- Allow for defining funciton arguments with default values ([#12044](https://github.com/dbt-labs/dbt-core/issues/12044))
+
+### Fixes
+
+- Fix parse error when build_after.count set to 0 ([#12136](https://github.com/dbt-labs/dbt-core/issues/12136))
+- Stop compiling python udfs like python models ([#12153](https://github.com/dbt-labs/dbt-core/issues/12153))
+- For metric names, fix bug allowing hyphens (not allowed in metricflow already), make validation throw ValidationErrors (not ParsingErrors), and add tests. ([#n/a](https://github.com/dbt-labs/dbt-core/issues/n/a))
+- Include macros in unit test parsing ([#10157](https://github.com/dbt-labs/dbt-core/issues/10157))
+
+### Under the Hood
+
+- add dbt/jsonschemas to manifest.in ([#12126](https://github.com/dbt-labs/dbt-core/issues/12126))
+- Move from setup.py to pyproject.toml ([#5696](https://github.com/dbt-labs/dbt-core/issues/5696))
+- Fixes issue where config isn't propagated to metric from measure when set as create_metric:True ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
+- Support DBT_ENGINE prefix for record-mode env vars ([#12149](https://github.com/dbt-labs/dbt-core/issues/12149))
+
+### Dependencies
+
+- Drop support for python 3.9 ([#12118](https://github.com/dbt-labs/dbt-core/issues/12118))
+
+### Contributors
+- [@WilliamDee](https://github.com/WilliamDee) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
+- [@nathanskone](https://github.com/nathanskone) ([#10157](https://github.com/dbt-labs/dbt-core/issues/10157))
+- [@theyostalservice](https://github.com/theyostalservice) ([#n/a](https://github.com/dbt-labs/dbt-core/issues/n/a))
+
 ## dbt-core 1.11.0-b4 - October 28, 2025

 ### Features

@@ -29,7 +111,6 @@
 - [@12030](https://github.com/12030) ([#QMalcolm](https://github.com/dbt-labs/dbt-core/issues/QMalcolm))
 - [@WilliamDee](https://github.com/WilliamDee) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))

-
 ## dbt-core 1.11.0-b3 - October 07, 2025

 ### Features

CONTRIBUTING.md  (120 changed lines)

@@ -20,9 +20,8 @@
 - [Testing](#testing)
 - [Initial setup](#initial-setup)
 - [Test commands](#test-commands)
-- [Makefile](#makefile)
+- [Hatch scripts](#hatch-scripts)
 - [`pre-commit`](#pre-commit)
-- [`tox`](#tox)
 - [`pytest`](#pytest)
 - [Unit, Integration, Functional?](#unit-integration-functional)
 - [Debugging](#debugging)

@@ -35,7 +34,7 @@

 There are many ways to contribute to the ongoing development of `dbt-core`, such as by participating in discussions and issues. We encourage you to first read our higher-level document: ["Expectations for Open Source Contributors"](https://docs.getdbt.com/docs/contributing/oss-expectations).

-The rest of this document serves as a more granular guide for contributing code changes to `dbt-core` (this repository). It is not intended as a guide for using `dbt-core`, and some pieces assume a level of familiarity with Python development (virtualenvs, `pip`, etc). Specific code snippets in this guide assume you are using macOS or Linux and are comfortable with the command line.
+The rest of this document serves as a more granular guide for contributing code changes to `dbt-core` (this repository). It is not intended as a guide for using `dbt-core`, and some pieces assume a level of familiarity with Python development and package managers. Specific code snippets in this guide assume you are using macOS or Linux and are comfortable with the command line.

 If you get stuck, we're happy to help! Drop us a line in the `#dbt-core-development` channel in the [dbt Community Slack](https://community.getdbt.com).

@@ -74,28 +73,22 @@ There are some tools that will be helpful to you in developing locally. While th

 These are the tools used in `dbt-core` development and testing:

-- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.10, 3.11, 3.12, and 3.13
+- [`hatch`](https://hatch.pypa.io/) for build backend, environment management, and running tests across Python versions (3.10, 3.11, 3.12, and 3.13)
 - [`pytest`](https://docs.pytest.org/en/latest/) to define, discover, and run tests
 - [`flake8`](https://flake8.pycqa.org/en/latest/) for code linting
 - [`black`](https://github.com/psf/black) for code formatting
 - [`mypy`](https://mypy.readthedocs.io/en/stable/) for static type checking
 - [`pre-commit`](https://pre-commit.com) to easily run those checks
 - [`changie`](https://changie.dev/) to create changelog entries, without merge conflicts
-- [`make`](https://users.cs.duke.edu/~ola/courses/programming/Makefiles/Makefiles.html) to run multiple setup or test steps in combination. Don't worry too much, nobody _really_ understands how `make` works, and our Makefile aims to be super simple.
 - [GitHub Actions](https://github.com/features/actions) for automating tests and checks, once a PR is pushed to the `dbt-core` repository

 A deep understanding of these tools in not required to effectively contribute to `dbt-core`, but we recommend checking out the attached documentation if you're interested in learning more about each one.

 #### Virtual environments

-We strongly recommend using virtual environments when developing code in `dbt-core`. We recommend creating this virtualenv
-in the root of the `dbt-core` repository. To create a new virtualenv, run:
-```sh
-python3 -m venv env
-source env/bin/activate
-```
-
-This will create and activate a new Python virtual environment.
+dbt-core uses [Hatch](https://hatch.pypa.io/) for dependency and environment management. Hatch automatically creates and manages isolated environments for development, testing, and building, so you don't need to manually create virtual environments.
+
+For more information on how Hatch manages environments, see the [Hatch environment documentation](https://hatch.pypa.io/latest/environment/).

 #### Docker and `docker-compose`

@@ -114,22 +107,42 @@ brew install postgresql

 ### Installation

-First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt-core` (and its dependencies):
+First make sure you have Python 3.10 or later installed. Ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `hatch`. Finally set up `dbt-core` for development:

 ```sh
-make dev
+cd core
+hatch run setup
 ```
-or, alternatively:
+
+This will install all development dependencies and set up pre-commit hooks.
+
+By default, hatch will use whatever Python version is active in your environment. To specify a particular Python version, set the `HATCH_PYTHON` environment variable:

 ```sh
-pip install -r dev-requirements.txt -r editable-requirements.txt
-pre-commit install
+export HATCH_PYTHON=3.12
+hatch env create
 ```

+Or add it to your shell profile (e.g., `~/.zshrc` or `~/.bashrc`) for persistence.
+
 When installed in this way, any changes you make to your local copy of the source code will be reflected immediately in your next `dbt` run.

+#### Building dbt-core
+
+dbt-core uses [Hatch](https://hatch.pypa.io/) (specifically `hatchling`) as its build backend. To build distribution packages:
+
+```sh
+cd core
+hatch build
+```
+
+This will create both wheel (`.whl`) and source distribution (`.tar.gz`) files in the `dist/` directory.
+
+The build configuration is defined in `core/pyproject.toml`. You can also use the standard `python -m build` command if you prefer.
+
 ### Running `dbt-core`

-With your virtualenv activated, the `dbt` script should point back to the source code you've cloned on your machine. You can verify this by running `which dbt`. This command should show you a path to an executable in your virtualenv.
+Once you've run `hatch run setup`, the `dbt` command will be available in your PATH. You can verify this by running `which dbt`.

 Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as necessary to connect to your target databases. It may be a good idea to add a new profile pointing to a local Postgres instance, or a specific test sandbox within your data warehouse if appropriate. Make sure to create a profile before running integration tests.

@@ -147,9 +160,12 @@ Although `dbt-core` works with a number of different databases, you won't need t
 Postgres offers the easiest way to test most `dbt-core` functionality today. They are the fastest to run, and the easiest to set up. To run the Postgres integration tests, you'll have to do one extra step of setting up the test database:

 ```sh
-make setup-db
+cd core
+hatch run setup-db
 ```
-or, alternatively:
+
+Alternatively, you can run the setup commands directly:

 ```sh
 docker-compose up -d database
 PGHOST=localhost PGUSER=root PGPASSWORD=password PGDATABASE=postgres bash test/setup_db.sh

@@ -159,33 +175,63 @@ PGHOST=localhost PGUSER=root PGPASSWORD=password PGDATABASE=postgres bash test/s

 There are a few methods for running tests locally.

-#### Makefile
+#### Hatch scripts

-There are multiple targets in the Makefile to run common test suites and code
-checks, most notably:
+The primary way to run tests and checks is using hatch scripts (defined in `core/hatch.toml`):

 ```sh
-# Runs unit tests with py38 and code checks in parallel.
-make test
-# Runs postgres integration tests with py38 in "fail fast" mode.
-make integration
-```
-> These make targets assume you have a local installation of a recent version of [`tox`](https://tox.readthedocs.io/en/latest/) for unit/integration testing and pre-commit for code quality checks,
-> unless you use choose a Docker container to run tests. Run `make help` for more info.
-
-Check out the other targets in the Makefile to see other commonly used test
-suites.
+cd core
+
+# Run all unit tests
+hatch run unit-tests
+
+# Run unit tests and all code quality checks
+hatch run test
+
+# Run integration tests
+hatch run integration-tests
+
+# Run integration tests in fail-fast mode
+hatch run integration-tests-fail-fast
+
+# Run linting checks only
+hatch run lint
+hatch run flake8
+hatch run mypy
+hatch run black
+
+# Run all pre-commit hooks
+hatch run code-quality
+
+# Clean build artifacts
+hatch run clean
+```
+
+Hatch manages isolated environments and dependencies automatically. The commands above use the `default` environment which is recommended for most local development.
+
+**Using the `ci` environment (optional)**
+
+If you need to replicate exactly what runs in GitHub Actions (e.g., with coverage reporting), use the `ci` environment:
+
+```sh
+cd core
+
+# Run unit tests with coverage
+hatch run ci:unit-tests
+
+# Run unit tests with a specific Python version
+hatch run +py=3.11 ci:unit-tests
+```
+
+> **Note:** Most developers should use the default environment (`hatch run unit-tests`). The `ci` environment is primarily for debugging CI failures or running tests with coverage.

 #### `pre-commit`
-[`pre-commit`](https://pre-commit.com) takes care of running all code-checks for formatting and linting. Run `make dev` to install `pre-commit` in your local environment (we recommend running this command with a python virtual environment active). This command installs several pip executables including black, mypy, and flake8. Once this is done you can use any of the linter-based make targets as well as a git pre-commit hook that will ensure proper formatting and linting.
-
-#### `tox`
-
-[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and install dependencies in order to run tests. You can also run tests in parallel, for example, you can run unit tests for Python 3.8, Python 3.9, Python 3.10 and Python 3.11 checks in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py38`. The configuration for these tests in located in `tox.ini`.
+
+[`pre-commit`](https://pre-commit.com) takes care of running all code-checks for formatting and linting. Run `hatch run setup` to install `pre-commit` in your local environment (we recommend running this command with a python virtual environment active). This installs several pip executables including black, mypy, and flake8. Once installed, hooks will run automatically on `git commit`, or you can run them manually with `hatch run code-quality`.

 #### `pytest`

-Finally, you can also run a specific test or group of tests using [`pytest`](https://docs.pytest.org/en/latest/) directly. With a virtualenv active and dev dependencies installed you can do things like:
+Finally, you can also run a specific test or group of tests using [`pytest`](https://docs.pytest.org/en/latest/) directly. After running `hatch run setup`, you can run pytest commands like:

 ```sh
 # run all unit tests in a file

@@ -47,7 +47,7 @@ RUN curl -LO https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_V
     && tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
     && rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz

-RUN pip3 install -U tox wheel six setuptools pre-commit
+RUN pip3 install -U hatch wheel pre-commit

 # These args are passed in via docker-compose, which reads then from the .env file.
 # On Linux, run `make .env` to create the .env file for the current user.

@@ -62,7 +62,6 @@ RUN if [ ${USER_ID:-0} -ne 0 ] && [ ${GROUP_ID:-0} -ne 0 ]; then \
         useradd -mU -l dbt_test_user; \
     fi
 RUN mkdir /usr/app && chown dbt_test_user /usr/app
-RUN mkdir /home/tox && chown dbt_test_user /home/tox

 WORKDIR /usr/app
 VOLUME /usr/app

Makefile  (163 changed lines)
@@ -1,146 +1,95 @@
+# ============================================================================
+# DEPRECATED: This Makefile is maintained for backwards compatibility only.
+#
+# dbt-core now uses Hatch for task management and development workflows.
+# Please migrate to using hatch commands directly:
+#
+#   make dev          → cd core && hatch run setup
+#   make unit         → cd core && hatch run unit-tests
+#   make test         → cd core && hatch run test
+#   make integration  → cd core && hatch run integration-tests
+#   make lint         → cd core && hatch run lint
+#   make code_quality → cd core && hatch run code-quality
+#   make setup-db     → cd core && hatch run setup-db
+#   make clean        → cd core && hatch run clean
+#
+# See core/pyproject.toml [tool.hatch.envs.default.scripts] for all available
+# commands and CONTRIBUTING.md for detailed usage instructions.
+#
+# This Makefile will be removed in a future version of dbt-core.
+# ============================================================================
+
 .DEFAULT_GOAL:=help

-# Optional flag to run target in a docker container.
-# (example `make test USE_DOCKER=true`)
-ifeq ($(USE_DOCKER),true)
-	DOCKER_CMD := docker-compose run --rm test
-endif
-
-#
-# To override CI_flags, create a file at this repo's root dir named `makefile.test.env`. Fill it
-# with any ENV_VAR overrides required by your test environment, e.g.
-# DBT_TEST_USER_1=user
-# LOG_DIR="dir with a space in it"
-#
-# Warn: Restrict each line to one variable only.
-#
-ifeq (./makefile.test.env,$(wildcard ./makefile.test.env))
-	include ./makefile.test.env
-endif
-
-CI_FLAGS =\
-	DBT_TEST_USER_1=$(if $(DBT_TEST_USER_1),$(DBT_TEST_USER_1),dbt_test_user_1)\
-	DBT_TEST_USER_2=$(if $(DBT_TEST_USER_2),$(DBT_TEST_USER_2),dbt_test_user_2)\
-	DBT_TEST_USER_3=$(if $(DBT_TEST_USER_3),$(DBT_TEST_USER_3),dbt_test_user_3)\
-	RUSTFLAGS=$(if $(RUSTFLAGS),$(RUSTFLAGS),"-D warnings")\
-	LOG_DIR=$(if $(LOG_DIR),$(LOG_DIR),./logs)\
-	DBT_LOG_FORMAT=$(if $(DBT_LOG_FORMAT),$(DBT_LOG_FORMAT),json)
-
 .PHONY: dev_req
 dev_req: ## Installs dbt-* packages in develop mode along with only development dependencies.
-	@\
-	pip install -r dev-requirements.txt -r editable-requirements.txt
+	@cd core && hatch run dev-req

 .PHONY: dev
-dev: dev_req ## Installs dbt-* packages in develop mode along with development dependencies and pre-commit.
-	@\
-	$(DOCKER_CMD) pre-commit install
+dev: ## Installs dbt-* packages in develop mode along with development dependencies and pre-commit.
+	@cd core && hatch run setup

 .PHONY: dev-uninstall
 dev-uninstall: ## Uninstall all packages in venv except for build tools
-	@\
-	pip freeze | grep -v "^-e" | cut -d "@" -f1 | xargs pip uninstall -y; \
-	pip uninstall -y dbt-core
+	@pip freeze | grep -v "^-e" | cut -d "@" -f1 | xargs pip uninstall -y; \
+	pip uninstall -y dbt-core

 .PHONY: mypy
-mypy: .env ## Runs mypy against staged changes for static type checking.
-	@\
-	$(DOCKER_CMD) pre-commit run --hook-stage manual mypy-check | grep -v "INFO"
+mypy: ## Runs mypy against staged changes for static type checking.
+	@cd core && hatch run mypy

 .PHONY: flake8
-flake8: .env ## Runs flake8 against staged changes to enforce style guide.
-	@\
-	$(DOCKER_CMD) pre-commit run --hook-stage manual flake8-check | grep -v "INFO"
+flake8: ## Runs flake8 against staged changes to enforce style guide.
+	@cd core && hatch run flake8

 .PHONY: black
-black: .env ## Runs black against staged changes to enforce style guide.
-	@\
-	$(DOCKER_CMD) pre-commit run --hook-stage manual black-check -v | grep -v "INFO"
+black: ## Runs black against staged changes to enforce style guide.
+	@cd core && hatch run black

 .PHONY: lint
-lint: .env ## Runs flake8 and mypy code checks against staged changes.
-	@\
-	$(DOCKER_CMD) pre-commit run flake8-check --hook-stage manual | grep -v "INFO"; \
-	$(DOCKER_CMD) pre-commit run mypy-check --hook-stage manual | grep -v "INFO"
+lint: ## Runs flake8 and mypy code checks against staged changes.
+	@cd core && hatch run lint
+
+.PHONY: code_quality
+code_quality: ## Runs all pre-commit hooks against all files.
+	@cd core && hatch run code-quality

 .PHONY: unit
-unit: .env ## Runs unit tests with py
-	@\
-	$(DOCKER_CMD) tox -e py
+unit: ## Runs unit tests with py
+	@cd core && hatch run unit-tests

 .PHONY: test
-test: .env ## Runs unit tests with py and code checks against staged changes.
-	@\
-	$(DOCKER_CMD) tox -e py; \
-	$(DOCKER_CMD) pre-commit run black-check --hook-stage manual | grep -v "INFO"; \
-	$(DOCKER_CMD) pre-commit run flake8-check --hook-stage manual | grep -v "INFO"; \
-	$(DOCKER_CMD) pre-commit run mypy-check --hook-stage manual | grep -v "INFO"
+test: ## Runs unit tests with py and code checks against staged changes.
+	@cd core && hatch run test

 .PHONY: integration
-integration: .env ## Runs core integration tests using postgres with py-integration
-	@\
-	$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto
+integration: ## Runs core integration tests using postgres with py-integration
+	@cd core && hatch run integration-tests

 .PHONY: integration-fail-fast
-integration-fail-fast: .env ## Runs core integration tests using postgres with py-integration in "fail fast" mode.
-	@\
-	$(DOCKER_CMD) tox -e py-integration -- -x -nauto
-
-.PHONY: interop
-interop: clean
-	@\
-	mkdir $(LOG_DIR) && \
-	$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto && \
-	LOG_DIR=$(LOG_DIR) cargo run --manifest-path test/interop/log_parsing/Cargo.toml
+integration-fail-fast: ## Runs core integration tests using postgres with py-integration in "fail fast" mode.
+	@cd core && hatch run integration-tests-fail-fast

 .PHONY: setup-db
 setup-db: ## Setup Postgres database with docker-compose for system testing.
-	@\
-	docker compose up -d database && \
-	PGHOST=localhost PGUSER=root PGPASSWORD=password PGDATABASE=postgres SKIP_HOMEBREW=true bash test/setup_db.sh
-
-# This rule creates a file named .env that is used by docker-compose for passing
-# the USER_ID and GROUP_ID arguments to the Docker image.
-.env: ## Setup step for using using docker-compose with make target.
-	@touch .env
-ifneq ($(OS),Windows_NT)
-ifneq ($(shell uname -s), Darwin)
-	@echo USER_ID=$(shell id -u) > .env
-	@echo GROUP_ID=$(shell id -g) >> .env
-endif
-endif
+	@cd core && hatch run setup-db

 .PHONY: clean
 clean: ## Resets development environment.
-	@echo 'cleaning repo...'
-	@rm -f .coverage
-	@rm -f .coverage.*
-	@rm -rf .eggs/
-	@rm -f .env
-	@rm -rf .tox/
-	@rm -rf build/
-	@rm -rf dbt.egg-info/
-	@rm -f dbt_project.yml
-	@rm -rf dist/
-	@rm -f htmlcov/*.{css,html,js,json,png}
-	@rm -rf logs/
-	@rm -rf target/
-	@find . -type f -name '*.pyc' -delete
-	@find . -type d -name '__pycache__' -depth -delete
-	@echo 'done.'
+	@cd core && hatch run clean
+
+.PHONY: json_schema
+json_schema: ## Update generated JSON schema using code changes.
+	@cd core && hatch run json-schema

 .PHONY: help
 help: ## Show this help message.
-	@echo 'usage: make [target] [USE_DOCKER=true]'
+	@echo 'usage: make [target]'
+	@echo
+	@echo 'DEPRECATED: This Makefile is a compatibility shim.'
+	@echo 'Please use "cd core && hatch run <command>" directly.'
 	@echo
 	@echo 'targets:'
 	@grep -E '^[8+a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
 	@echo
-	@echo 'options:'
-	@echo 'use USE_DOCKER=true to run target in a docker container'
-
-.PHONY: json_schema
-json_schema: ## Update generated JSON schema using code changes.
-	scripts/collect-artifact-schema.py --path schemas
+	@echo 'For more information, see CONTRIBUTING.md'

codecov.yml  (27 changed lines)

@@ -2,39 +2,22 @@ ignore:
   - ".github"
   - ".changes"

+# Disable all status checks to prevent red X's in CI
+# Coverage data is still uploaded and PR comments are still posted
 coverage:
   status:
-    project:
-      default:
-        target: auto
-        threshold: 0.1% # Reduce noise by ignoring rounding errors in coverage drops
-        informational: true
-    patch:
-      default:
-        target: auto
-        threshold: 80%
-        informational: true
+    project: off
+    patch: off

 comment:
   layout: "header, diff, flags, components" # show component info in the PR comment

 component_management:
-  default_rules: # default rules that will be inherited by all components
-    statuses:
-      - type: project # in this case every component that doens't have a status defined will have a project type one
-        target: auto
-        threshold: 0.1%
-      - type: patch
-        target: 80%
   individual_components:
     - component_id: unittests
       name: "Unit Tests"
       flag_regexes:
         - "unit"
-      statuses:
-        - type: patch
-          target: 80%
-          threshold: 5%
     - component_id: integrationtests
       name: "Integration Tests"
       flag_regexes:

@@ -1,4 +0,0 @@
-recursive-include dbt/include *.py *.sql *.yml *.html *.md .gitkeep .gitignore
-include dbt/py.typed
-recursive-include dbt/task/docs *.html
-recursive-include dbt/jsonschemas *.json

core/dbt/__version__.py  (new file, 1 line)

@@ -0,0 +1 @@
+version = "1.11.0rc3"

@@ -1,5 +1,5 @@
 from dataclasses import dataclass, field
-from typing import List, Literal, Optional
+from typing import Any, List, Literal, Optional

 from dbt.artifacts.resources.types import FunctionType, FunctionVolatility, NodeType
 from dbt.artifacts.resources.v1.components import CompiledResource

@@ -32,6 +32,7 @@ class FunctionArgument(dbtClassMixin):
     name: str
     data_type: str
     description: Optional[str] = None
+    default_value: Optional[Any] = None


 @dataclass

@@ -235,6 +235,7 @@ exclude_resource_type = _create_option_and_track_env_var(
         "exposure",
         "snapshot",
         "seed",
+        "function",
         "default",
     ],
     case_sensitive=False,

@@ -291,8 +291,22 @@ def project(func):
         flags = ctx.obj["flags"]
         # TODO deprecations warnings fired from loading the project will lack
         # the project_id in the snowplow event.
+
+        # Determine if vars should be required during project loading.
+        # Commands that don't need vars evaluated (like 'deps', 'clean')
+        # should use lenient mode (require_vars=False) to allow missing vars.
+        # Commands that validate or execute (like 'run', 'compile', 'build', 'debug') should use
+        # strict mode (require_vars=True) to show helpful "Required var X not found" errors.
+        # If adding more commands to lenient mode, update this condition.
+        require_vars = flags.WHICH != "deps"
+
         project = load_project(
-            flags.PROJECT_DIR, flags.VERSION_CHECK, ctx.obj["profile"], flags.VARS, validate=True
+            flags.PROJECT_DIR,
+            flags.VERSION_CHECK,
+            ctx.obj["profile"],
+            flags.VARS,
+            validate=True,
+            require_vars=require_vars,
         )
         ctx.obj["project"] = project

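The comments in the hunk above describe when project loading tolerates undeclared vars. A standalone, illustrative-only sketch of that decision rule (it simply mirrors the `require_vars = flags.WHICH != "deps"` line; it is not dbt's CLI machinery):

```python
# Illustrative sketch only: which commands require strict var resolution.
def needs_strict_vars(command: str) -> bool:
    # `deps` loads the project leniently so a missing var doesn't block
    # dependency installation; every other command keeps strict errors.
    return command != "deps"

assert needs_strict_vars("run") is True
assert needs_strict_vars("deps") is False
```
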
@@ -26,6 +26,7 @@ from dbt.contracts.graph.nodes import (
     SeedNode,
     UnitTestDefinition,
     UnitTestNode,
+    UnitTestSourceDefinition,
 )
 from dbt.events.types import FoundStats, WritingInjectedSQLForNode
 from dbt.exceptions import (

@@ -566,7 +567,12 @@ class Compiler:

             _extend_prepended_ctes(prepended_ctes, new_prepended_ctes)

-            new_cte_name = self.add_ephemeral_prefix(cte_model.identifier)
+            cte_name = (
+                cte_model.cte_name
+                if isinstance(cte_model, UnitTestSourceDefinition)
+                else cte_model.identifier
+            )
+            new_cte_name = self.add_ephemeral_prefix(cte_name)
             rendered_sql = cte_model._pre_injected_sql or cte_model.compiled_code
             sql = f" {new_cte_name} as (\n{rendered_sql}\n)"

@@ -654,8 +660,15 @@ class Compiler:
             raise GraphDependencyNotFoundError(node, to_expression)

         adapter = get_adapter(self.config)
-        relation_name = str(adapter.Relation.create_from(self.config, foreign_key_node))
-        return relation_name
+        if (
+            hasattr(foreign_key_node, "defer_relation")
+            and foreign_key_node.defer_relation
+            and self.config.args.defer
+        ):
+            return str(adapter.Relation.create_from(self.config, foreign_key_node.defer_relation))
+        else:
+            return str(adapter.Relation.create_from(self.config, foreign_key_node))

     # This method doesn't actually "compile" any of the nodes. That is done by the
     # "compile_node" method. This creates a Linker and builds the networkx graph,

@@ -101,7 +101,10 @@ class DbtProjectYamlRenderer(BaseRenderer):
     _KEYPATH_HANDLERS = ProjectPostprocessor()

     def __init__(
-        self, profile: Optional[HasCredentials] = None, cli_vars: Optional[Dict[str, Any]] = None
+        self,
+        profile: Optional[HasCredentials] = None,
+        cli_vars: Optional[Dict[str, Any]] = None,
+        require_vars: bool = True,
     ) -> None:
         # Generate contexts here because we want to save the context
         # object in order to retrieve the env_vars. This is almost always

@@ -109,10 +112,19 @@ class DbtProjectYamlRenderer(BaseRenderer):
         # even when we don't have a profile.
         if cli_vars is None:
             cli_vars = {}
+        # Store profile and cli_vars for creating strict context later
+        self.profile = profile
+        self.cli_vars = cli_vars
+
+        # By default, require vars (strict mode) for proper error messages.
+        # Commands that don't need vars (like 'deps') should explicitly pass
+        # require_vars=False for lenient loading.
         if profile:
-            self.ctx_obj = TargetContext(profile.to_target_dict(), cli_vars)
+            self.ctx_obj = TargetContext(
+                profile.to_target_dict(), cli_vars, require_vars=require_vars
+            )
         else:
-            self.ctx_obj = BaseContext(cli_vars)  # type:ignore
+            self.ctx_obj = BaseContext(cli_vars, require_vars=require_vars)  # type:ignore
         context = self.ctx_obj.to_dict()
         super().__init__(context)

@@ -52,9 +52,10 @@ def load_project(
     profile: HasCredentials,
     cli_vars: Optional[Dict[str, Any]] = None,
     validate: bool = False,
+    require_vars: bool = True,
 ) -> Project:
     # get the project with all of the provided information
-    project_renderer = DbtProjectYamlRenderer(profile, cli_vars)
+    project_renderer = DbtProjectYamlRenderer(profile, cli_vars, require_vars=require_vars)
     project = Project.from_project_root(
         project_root, project_renderer, verify_version=version_check, validate=validate
     )

@@ -267,7 +268,14 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
             args,
         )
         flags = get_flags()
-        project = load_project(project_root, bool(flags.VERSION_CHECK), profile, cli_vars)
+        # For dbt deps, use lenient var validation to allow missing vars
+        # For all other commands, use strict validation for helpful error messages
+        # If command is not set (e.g., during test setup), default to strict mode
+        # unless the command is explicitly "deps"
+        require_vars = getattr(flags, "WHICH", None) != "deps"
+        project = load_project(
+            project_root, bool(flags.VERSION_CHECK), profile, cli_vars, require_vars=require_vars
+        )
         return project, profile

     # Called in task/base.py, in BaseTask.from_args

@@ -152,10 +152,12 @@ class Var:
         context: Mapping[str, Any],
         cli_vars: Mapping[str, Any],
         node: Optional[Resource] = None,
+        require_vars: bool = True,
     ) -> None:
         self._context: Mapping[str, Any] = context
         self._cli_vars: Mapping[str, Any] = cli_vars
         self._node: Optional[Resource] = node
+        self._require_vars: bool = require_vars
         self._merged: Mapping[str, Any] = self._generate_merged()

     def _generate_merged(self) -> Mapping[str, Any]:

@@ -168,9 +170,11 @@ class Var:
         else:
             return "<Configuration>"

-    def get_missing_var(self, var_name: str) -> NoReturn:
-        # TODO function name implies a non exception resolution
-        raise RequiredVarNotFoundError(var_name, dict(self._merged), self._node)
+    def get_missing_var(self, var_name: str) -> None:
+        # Only raise an error if vars are _required_
+        if self._require_vars:
+            # TODO function name implies a non exception resolution
+            raise RequiredVarNotFoundError(var_name, dict(self._merged), self._node)

     def has_var(self, var_name: str) -> bool:
         return var_name in self._merged

@@ -198,10 +202,11 @@ class BaseContext(metaclass=ContextMeta):
     _context_attrs_: Dict[str, Any]

     # subclass is TargetContext
-    def __init__(self, cli_vars: Dict[str, Any]) -> None:
+    def __init__(self, cli_vars: Dict[str, Any], require_vars: bool = True) -> None:
         self._ctx: Dict[str, Any] = {}
         self.cli_vars: Dict[str, Any] = cli_vars
         self.env_vars: Dict[str, Any] = {}
+        self.require_vars: bool = require_vars

     def generate_builtins(self) -> Dict[str, Any]:
         builtins: Dict[str, Any] = {}

@@ -307,7 +312,7 @@ class BaseContext(metaclass=ContextMeta):
             from events
             where event_type = '{{ var("event_type", "activation") }}'
         """
-        return Var(self._ctx, self.cli_vars)
+        return Var(self._ctx, self.cli_vars, require_vars=self.require_vars)

     @contextmember()
     def env_var(self, var: str, default: Optional[str] = None) -> str:

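A minimal, standalone sketch of the strict-vs-lenient behavior that the `get_missing_var` change above introduces (illustrative names only, not the real dbt classes):

```python
# Standalone sketch, not the dbt implementation: a missing var raises in
# strict mode and quietly resolves to None in lenient mode.
class RequiredVarNotFound(Exception):
    pass

def resolve_var(name, merged_vars, require_vars=True):
    if name in merged_vars:
        return merged_vars[name]
    if require_vars:          # strict: e.g. dbt run / compile / build
        raise RequiredVarNotFound(name)
    return None               # lenient: e.g. dbt deps

assert resolve_var("undeclared", {}, require_vars=False) is None
```
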
@@ -15,8 +15,8 @@ class ConfiguredContext(TargetContext):
     # subclasses are SchemaYamlContext, MacroResolvingContext, ManifestContext
     config: AdapterRequiredConfig

-    def __init__(self, config: AdapterRequiredConfig) -> None:
-        super().__init__(config.to_target_dict(), config.cli_vars)
+    def __init__(self, config: AdapterRequiredConfig, require_vars: bool = True) -> None:
+        super().__init__(config.to_target_dict(), config.cli_vars, require_vars=require_vars)
         self.config = config

     @contextproperty()

@@ -854,7 +854,12 @@ class RuntimeUnitTestSourceResolver(BaseSourceResolver):
         # we just need to set_cte, but skipping it confuses typing. We *do* need
         # the relation in the "this" property.
         self.model.set_cte(target_source.unique_id, None)
-        return self.Relation.create_ephemeral_from(target_source)
+        identifier = self.Relation.add_ephemeral_prefix(target_source.cte_name)
+        return self.Relation.create(
+            type=self.Relation.CTE,
+            identifier=identifier,
+        ).quote(identifier=False)


 # metric` implementations

@@ -1905,6 +1910,21 @@ def generate_parser_model_context(
     return ctx.to_dict()


+def generate_parser_unit_test_context(
+    unit_test: UnitTestNode, config: RuntimeConfig, manifest: Manifest
+) -> Dict[str, Any]:
+    context_config = ContextConfig(
+        config,
+        unit_test.fqn,
+        NodeType.Unit,
+        config.project_name,
+    )
+
+    ctx = UnitTestContext(unit_test, config, manifest, ParseProvider(), context_config)
+
+    return ctx.to_dict()
+
+
 def generate_generate_name_macro_context(
     macro: Macro,
     config: RuntimeConfig,

@@ -5,8 +5,10 @@ from dbt.context.base import BaseContext, contextproperty

 class TargetContext(BaseContext):
     # subclass is ConfiguredContext
-    def __init__(self, target_dict: Dict[str, Any], cli_vars: Dict[str, Any]):
-        super().__init__(cli_vars=cli_vars)
+    def __init__(
+        self, target_dict: Dict[str, Any], cli_vars: Dict[str, Any], require_vars: bool = True
+    ):
+        super().__init__(cli_vars=cli_vars, require_vars=require_vars)
         self.target_dict = target_dict

     @contextproperty()

@@ -161,6 +161,7 @@ class SourceFile(BaseSourceFile):
     docs: List[str] = field(default_factory=list)
     macros: List[str] = field(default_factory=list)
     env_vars: List[str] = field(default_factory=list)
+    functions: List[str] = field(default_factory=list)

     @classmethod
     def big_seed(cls, path: FilePath) -> "SourceFile":

@@ -1715,9 +1715,10 @@ class Manifest(MacroMethods, dbtClassMixin):
         self.exposures[exposure.unique_id] = exposure
         source_file.exposures.append(exposure.unique_id)

-    def add_function(self, function: FunctionNode):
+    def add_function(self, source_file: SourceFile, function: FunctionNode):
         _check_duplicates(function, self.functions)
         self.functions[function.unique_id] = function
+        source_file.functions.append(function.unique_id)

     def add_metric(
         self, source_file: SchemaSourceFile, metric: Metric, generated_from: Optional[str] = None

@@ -697,6 +697,36 @@ class ModelNode(ModelResource, CompiledNode):
             )
         )

+    @staticmethod
+    def _normalize_data_type_for_comparison(data_type: Optional[str]) -> Optional[str]:
+        """
+        Normalize a data type string by removing size, precision, and scale parameters.
+        This allows comparison of base types while ignoring non-breaking parameter changes.
+
+        Examples:
+            varchar(10) -> varchar
+            VARCHAR(5) -> varchar
+            numeric(10,2) -> numeric
+            text -> text
+            decimal(5) -> decimal
+            None -> None
+
+        Per dbt documentation, changes to size/precision/scale should not be
+        considered breaking changes for contracts.
+        See: https://docs.getdbt.com/reference/resource-configs/contract#size-precision-and-scale
+
+        Note: Comparison is case-insensitive. Type aliases (e.g., 'varchar' vs
+        'character varying') are not automatically resolved - users should use
+        consistent type names in their contracts to avoid false positives.
+        """
+        if not data_type:
+            return data_type
+
+        # Split on the first '(' to get the base type without parameters
+        # Convert to lowercase for case-insensitive comparison
+        base_type, _, _ = data_type.partition("(")
+        return base_type.strip().lower()
+
     def same_contract(self, old, adapter_type=None) -> bool:
         # If the contract wasn't previously enforced:
         if old.contract.enforced is False and self.contract.enforced is False:

@@ -738,14 +768,24 @@ class ModelNode(ModelResource, CompiledNode):
                 columns_removed.append(old_value.name)
             # Has this column's data type changed?
             elif old_value.data_type != self.columns[old_key].data_type:
-                column_type_changes.append(
-                    {
-                        "column_name": str(old_value.name),
-                        "previous_column_type": str(old_value.data_type),
-                        "current_column_type": str(self.columns[old_key].data_type),
-                    }
+                # Compare normalized data types (without size/precision/scale)
+                # to determine if this is a breaking change
+                old_normalized = self._normalize_data_type_for_comparison(old_value.data_type)
+                new_normalized = self._normalize_data_type_for_comparison(
+                    self.columns[old_key].data_type
                 )
+
+                # Only consider it a breaking change if the base types differ
+                # Changes like varchar(3) -> varchar(10) are not breaking
+                if old_normalized != new_normalized:
+                    column_type_changes.append(
+                        {
+                            "column_name": str(old_value.name),
+                            "previous_column_type": str(old_value.data_type),
+                            "current_column_type": str(self.columns[old_key].data_type),
+                        }
+                    )

             # track if there are any column level constraints for the materialization check late
             if old_value.constraints:
                 column_constraints_exist = True

@@ -1058,6 +1098,10 @@ class UnitTestSourceDefinition(ModelNode):
|
|||||||
source_name: str = "undefined"
|
source_name: str = "undefined"
|
||||||
quoting: QuotingResource = field(default_factory=QuotingResource)
|
quoting: QuotingResource = field(default_factory=QuotingResource)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def cte_name(self):
|
||||||
|
return self.unique_id.split(".")[-1]
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def search_name(self):
|
def search_name(self):
|
||||||
return f"{self.source_name}.{self.name}"
|
return f"{self.source_name}.{self.name}"
|
||||||
|
|||||||
@@ -366,6 +366,7 @@ class ProjectFlags(ExtensibleDbtClassMixin):
 validate_macro_args: bool = False
 require_all_warnings_handled_by_warn_error: bool = False
 require_generic_test_arguments_property: bool = True
+require_unique_project_resource_names: bool = False

 @property
 def project_only_flags(self) -> Dict[str, Any]:
@@ -382,6 +383,7 @@ class ProjectFlags(ExtensibleDbtClassMixin):
 "validate_macro_args": self.validate_macro_args,
 "require_all_warnings_handled_by_warn_error": self.require_all_warnings_handled_by_warn_error,
 "require_generic_test_arguments_property": self.require_generic_test_arguments_property,
+"require_unique_project_resource_names": self.require_unique_project_resource_names,
 }

@@ -230,6 +230,11 @@ class ModulesItertoolsUsageDeprecation(DBTDeprecation):
 _event = "ModulesItertoolsUsageDeprecation"


+class DuplicateNameDistinctNodeTypesDeprecation(DBTDeprecation):
+_name = "duplicate-name-distinct-node-types-deprecation"
+_event = "DuplicateNameDistinctNodeTypesDeprecation"
+
+
 def renamed_env_var(old_name: str, new_name: str):
 class EnvironmentVariableRenamed(DBTDeprecation):
 _name = f"environment-variable-renamed:{old_name}"
@@ -266,7 +271,7 @@ def show_deprecations_summary() -> None:
 deprecation_event = deprecations[deprecation].event()
 summaries.append(
 DeprecationSummary(
-event_name=deprecation_event.__name__,
+event_name=type(deprecation_event).__name__,
 event_code=deprecation_event.code(),
 occurrences=occurrences,
 ).to_msg_dict()
@@ -316,6 +321,7 @@ deprecations_list: List[DBTDeprecation] = [
 ArgumentsPropertyInGenericTestDeprecation(),
 MissingArgumentsPropertyInGenericTestDeprecation(),
 ModulesItertoolsUsageDeprecation(),
+DuplicateNameDistinctNodeTypesDeprecation(),
 ]

 deprecations: Dict[str, DBTDeprecation] = {d.name: d for d in deprecations_list}
@@ -771,6 +771,15 @@ class MissingArgumentsPropertyInGenericTestDeprecation(WarnLevel):
 return line_wrap_message(deprecation_tag(description, self.__class__.__name__))


+class DuplicateNameDistinctNodeTypesDeprecation(WarnLevel):
+def code(self) -> str:
+return "D040"
+
+def message(self) -> str:
+description = f"Found resources with the same name '{self.resource_name}' in package '{self.package_name}': '{self.unique_id1}' and '{self.unique_id2}'. Please update one of the resources to have a unique name."
+return line_wrap_message(deprecation_tag(description))
+
+
 # =======================================================
 # I - Project parsing
 # =======================================================
@@ -80,7 +80,7 @@ class AliasError(DbtValidationError):
 pass


-class DependencyError(Exception):
+class DependencyError(DbtRuntimeError):
 CODE = 10006
 MESSAGE = "Dependency Error"

@@ -120,7 +120,9 @@ class NodeSelector(MethodManager):
 additional.update(self.graph.select_children(selected, depth))
 return additional

-def select_nodes_recursively(self, spec: SelectionSpec) -> Tuple[Set[UniqueId], Set[UniqueId]]:
+def select_nodes_recursively(
+self, spec: SelectionSpec, warn_on_no_nodes: bool = True
+) -> Tuple[Set[UniqueId], Set[UniqueId]]:
 """If the spec is a composite spec (a union, difference, or intersection),
 recurse into its selections and combine them. If the spec is a concrete
 selection criteria, resolve that using the given graph.
@@ -128,7 +130,10 @@ class NodeSelector(MethodManager):
 if isinstance(spec, SelectionCriteria):
 direct_nodes, indirect_nodes = self.get_nodes_from_criteria(spec)
 else:
-bundles = [self.select_nodes_recursively(component) for component in spec]
+bundles = [
+self.select_nodes_recursively(spec=component, warn_on_no_nodes=warn_on_no_nodes)
+for component in spec.components
+]

 direct_sets = []
 indirect_sets = []
@@ -144,19 +149,23 @@ class NodeSelector(MethodManager):
 initial_direct, indirect_nodes, spec.indirect_selection
 )

-if spec.expect_exists and len(direct_nodes) == 0:
+if spec.expect_exists and len(direct_nodes) == 0 and warn_on_no_nodes:
 warn_or_error(NoNodesForSelectionCriteria(spec_raw=str(spec.raw)))

 return direct_nodes, indirect_nodes

-def select_nodes(self, spec: SelectionSpec) -> Tuple[Set[UniqueId], Set[UniqueId]]:
+def select_nodes(
+self, spec: SelectionSpec, warn_on_no_nodes: bool = True
+) -> Tuple[Set[UniqueId], Set[UniqueId]]:
 """Select the nodes in the graph according to the spec.

 This is the main point of entry for turning a spec into a set of nodes:
 - Recurse through spec, select by criteria, combine by set operation
 - Return final (unfiltered) selection set
 """
-direct_nodes, indirect_nodes = self.select_nodes_recursively(spec)
+direct_nodes, indirect_nodes = self.select_nodes_recursively(
+spec=spec, warn_on_no_nodes=warn_on_no_nodes
+)
 indirect_only = indirect_nodes.difference(direct_nodes)
 return direct_nodes, indirect_only

@@ -324,7 +333,7 @@ class NodeSelector(MethodManager):

 return selected

-def get_selected(self, spec: SelectionSpec) -> Set[UniqueId]:
+def get_selected(self, spec: SelectionSpec, warn_on_no_nodes: bool = True) -> Set[UniqueId]:
 """get_selected runs through the node selection process:

 - node selection. Based on the include/exclude sets, the set
@@ -334,7 +343,9 @@ class NodeSelector(MethodManager):
 - selectors can filter the nodes after all of them have been
 selected
 """
-selected_nodes, indirect_only = self.select_nodes(spec)
+selected_nodes, indirect_only = self.select_nodes(
+spec=spec, warn_on_no_nodes=warn_on_no_nodes
+)
 filtered_nodes = self.filter_selection(selected_nodes)

 return filtered_nodes
@@ -35,7 +35,7 @@ from dbt.contracts.state import PreviousState
 from dbt.node_types import NodeType
 from dbt_common.dataclass_schema import StrEnum
 from dbt_common.events.contextvars import get_project_root
-from dbt_common.exceptions import DbtInternalError, DbtRuntimeError
+from dbt_common.exceptions import CompilationError, DbtInternalError, DbtRuntimeError

 from .graph import UniqueId

@@ -655,6 +655,16 @@ class StateSelectorMethod(SelectorMethod):
 continue
 visited_macros.append(macro_uid)

+# If macro_uid is None, it means the macro/test was removed but is still referenced.
+# Raise a clear error to match the behavior of regular dbt run.
+if macro_uid is None:
+raise CompilationError(
+f"Node '{node.name}' (in {node.original_file_path}) depends on a macro or test "
+f"that does not exist. This can happen when a macro or generic test is removed "
+f"but is still referenced. Check for typos and/or install package dependencies "
+f"with 'dbt deps'."
+)
+
 if macro_uid in self.modified_macros:
 return True

@@ -1,5 +1,4 @@
 import json
-import os
 import re
 from datetime import date, datetime
 from pathlib import Path
@@ -141,9 +140,6 @@ def _get_allowed_config_fields_from_error_path(


 def _can_run_validations() -> bool:
-if not os.environ.get("DBT_ENV_PRIVATE_RUN_JSONSCHEMA_VALIDATIONS"):
-return False
-
 invocation_context = get_invocation_context()
 return invocation_context.adapter_types.issubset(_JSONSCHEMA_SUPPORTED_ADAPTERS)

@@ -168,6 +164,10 @@ def jsonschema_validate(schema: Dict[str, Any], json: Dict[str, Any], file_path:
 else:
 key_path = error_path_to_string(error)
 for key in keys:
+# Type params are not in the metrics v2 jsonschema from fusion, but dbt-core continues to maintain support for them in v1.
+if key == "type_params":
+continue
+
 if key == "overrides" and key_path.startswith("sources"):

 deprecations.warn(
@@ -265,6 +265,11 @@ def validate_model_config(config: Dict[str, Any], file_path: str) -> None:
 if len(error.path) == 0:
 key_path = error_path_to_string(error)
 for key in keys:
+# Special case for pre/post hook keys as they are updated during config parsing
+# from the user-provided pre_hook/post_hook to pre-hook/post-hook keys.
+# Avoids false positives as described in https://github.com/dbt-labs/dbt-core/issues/12087
+if key in ("post-hook", "pre-hook"):
+continue
 deprecations.warn(
 "custom-key-in-config-deprecation",
 key=key,
@@ -6,6 +6,16 @@
 "name"
 ],
 "properties": {
+"analyses": {
+"anyOf": [
+{
+"$ref": "#/definitions/ProjectAnalysisConfig"
+},
+{
+"type": "null"
+}
+]
+},
 "analysis-paths": {
 "type": [
 "array",
@@ -98,6 +108,25 @@
 }
 ]
 },
+"function-paths": {
+"type": [
+"array",
+"null"
+],
+"items": {
+"type": "string"
+}
+},
+"functions": {
+"anyOf": [
+{
+"$ref": "#/definitions/ProjectFunctionConfig"
+},
+{
+"type": "null"
+}
+]
+},
 "log-path": {
 "anyOf": [
 {
@@ -220,7 +249,7 @@
 "saved-queries": {
 "anyOf": [
 {
-"$ref": "#/definitions/ProjectSavedQueriesConfig"
+"$ref": "#/definitions/ProjectSavedQueryConfig"
 },
 {
 "type": "null"
@@ -399,6 +428,17 @@
 },
 "additionalProperties": false
 },
+"DataLakeObjectCategory": {
+"description": "See `category` from https://developer.salesforce.com/docs/data/connectapi/references/spec?meta=postDataLakeObject",
+"type": "string",
+"enum": [
+"Profile",
+"Engagement",
+"Directory_Table",
+"Insights",
+"Other"
+]
+},
 "DbtBatchSize": {
 "type": "string",
 "enum": [
@@ -441,8 +481,7 @@
 "merge",
 "delete+insert",
 "insert_overwrite",
-"microbatch",
-"unknown"
+"microbatch"
 ]
 },
 {
@@ -451,6 +490,18 @@
 "enum": [
 "replace_where"
 ]
+},
+{
+"type": "object",
+"required": [
+"custom"
+],
+"properties": {
+"custom": {
+"type": "string"
+}
+},
+"additionalProperties": false
 }
 ]
 },
@@ -459,6 +510,8 @@
 {
 "type": "string",
 "enum": [
+"snapshot",
+"seed",
 "view",
 "table",
 "incremental",
@@ -467,7 +520,8 @@
 "test",
 "ephemeral",
 "unit",
-"analysis"
+"analysis",
+"function"
 ]
 },
 {
@@ -484,6 +538,13 @@
 "dynamic_table"
 ]
 },
+{
+"description": "for inline SQL compilation",
+"type": "string",
+"enum": [
+"inline"
+]
+},
 {
 "type": "object",
 "required": [
@@ -647,6 +708,15 @@
 },
 "additionalProperties": false
 },
+"FunctionKind": {
+"description": "Function kind enum with same values as UDFKind",
+"type": "string",
+"enum": [
+"scalar",
+"aggregate",
+"table"
+]
+},
 "GrantAccessToTarget": {
 "type": "object",
 "properties": {
@@ -676,6 +746,12 @@
 "HookConfig": {
 "type": "object",
 "properties": {
+"index": {
+"type": [
+"string",
+"null"
+]
+},
 "sql": {
 "type": [
 "string",
@@ -788,7 +864,7 @@
 ]
 },
 "PartitionConfig": {
-"description": "dbt-core allows either of the variants for the `partition_by` in the model config but the bigquery-adapter throws RunTime error the behaviors are tested from the latest dbt-core + bigquery-adapter as this is written we're conformant to this behavior via here and via the `validate` method",
+"description": "dbt-core allows either of the variants for the `partition_by` in the model config but the bigquery-adapter throws RunTime error the behaviors are tested from the latest dbt-core + bigquery-adapter as this is written we're conformant to this behavior via here and via the `into_bigquery()` method",
 "anyOf": [
 {
 "type": "string"
@@ -849,9 +925,77 @@
 },
 "additionalProperties": false
 },
+"ProjectAnalysisConfig": {
+"type": "object",
+"properties": {
+"+docs": {
+"anyOf": [
+{
+"$ref": "#/definitions/DocsConfig"
+},
+{
+"type": "null"
+}
+]
+},
+"+enabled": {
+"type": [
+"boolean",
+"null"
+]
+},
+"+group": {
+"type": [
+"string",
+"null"
+]
+},
+"+meta": {
+"type": [
+"object",
+"null"
+],
+"additionalProperties": {
+"$ref": "#/definitions/AnyValue"
+}
+},
+"+static_analysis": {
+"anyOf": [
+{
+"$ref": "#/definitions/StaticAnalysisKind"
+},
+{
+"type": "null"
+}
+]
+},
+"+tags": {
+"anyOf": [
+{
+"$ref": "#/definitions/StringOrArrayOfStrings"
+},
+{
+"type": "null"
+}
+]
+}
+},
+"additionalProperties": {
+"$ref": "#/definitions/ProjectAnalysisConfig"
+}
+},
 "ProjectDataTestConfig": {
 "type": "object",
 "properties": {
+"+adapter_properties": {
+"type": [
+"object",
+"null"
+],
+"additionalProperties": {
+"$ref": "#/definitions/AnyValue"
+}
+},
 "+alias": {
 "type": [
 "string",
@@ -919,6 +1063,12 @@
 "null"
 ]
 },
+"+catalog_name": {
+"type": [
+"string",
+"null"
+]
+},
 "+cluster_by": {
 "anyOf": [
 {
@@ -1194,11 +1344,10 @@
 },
 "+refresh_interval_minutes": {
 "type": [
-"integer",
+"number",
 "null"
 ],
-"format": "uint64",
-"minimum": 0.0
+"format": "double"
 },
 "+refresh_mode": {
 "type": [
@@ -1476,6 +1625,12 @@
 "type": "null"
 }
 ]
+},
+"tenant_hostname": {
+"type": [
+"string",
+"null"
+]
 }
 },
 "additionalProperties": false
@@ -1513,6 +1668,150 @@
 "$ref": "#/definitions/ProjectExposureConfig"
 }
 },
+"ProjectFunctionConfig": {
+"type": "object",
+"properties": {
+"+access": {
+"anyOf": [
+{
+"$ref": "#/definitions/Access"
+},
+{
+"type": "null"
+}
+]
+},
+"+alias": {
+"type": [
+"string",
+"null"
+]
+},
+"+database": {
+"type": [
+"string",
+"null"
+]
+},
+"+description": {
+"type": [
+"string",
+"null"
+]
+},
+"+docs": {
+"anyOf": [
+{
+"$ref": "#/definitions/DocsConfig"
+},
+{
+"type": "null"
+}
+]
+},
+"+enabled": {
+"type": [
+"boolean",
+"null"
+]
+},
+"+grants": {
+"type": [
+"object",
+"null"
+],
+"additionalProperties": {
+"$ref": "#/definitions/StringOrArrayOfStrings"
+}
+},
+"+group": {
+"type": [
+"string",
+"null"
+]
+},
+"+language": {
+"type": [
+"string",
+"null"
+]
+},
+"+meta": {
+"type": [
+"object",
+"null"
+],
+"additionalProperties": {
+"$ref": "#/definitions/AnyValue"
+}
+},
+"+on_configuration_change": {
+"type": [
+"string",
+"null"
+]
+},
+"+quoting": {
+"anyOf": [
+{
+"$ref": "#/definitions/DbtQuoting"
+},
+{
+"type": "null"
+}
+]
+},
+"+schema": {
+"type": [
+"string",
+"null"
+]
+},
+"+static_analysis": {
+"anyOf": [
+{
+"$ref": "#/definitions/StaticAnalysisKind"
+},
+{
+"type": "null"
+}
+]
+},
+"+tags": {
+"anyOf": [
+{
+"$ref": "#/definitions/StringOrArrayOfStrings"
+},
+{
+"type": "null"
+}
+]
+},
+"+type": {
+"anyOf": [
+{
+"$ref": "#/definitions/FunctionKind"
+},
+{
+"type": "null"
+}
+]
+},
+"+volatility": {
+"anyOf": [
+{
+"$ref": "#/definitions/Volatility"
+},
+{
+"type": "null"
+}
+]
+}
+},
+"additionalProperties": {
+"$ref": "#/definitions/ProjectFunctionConfig"
+}
+},
 "ProjectMetricConfigs": {
 "type": "object",
 "properties": {
@@ -1565,6 +1864,15 @@
 }
 ]
 },
+"+adapter_properties": {
+"type": [
+"object",
+"null"
+],
+"additionalProperties": {
+"$ref": "#/definitions/AnyValue"
+}
+},
 "+alias": {
 "type": [
 "string",
@@ -1654,6 +1962,16 @@
 "null"
 ]
 },
+"+category": {
+"anyOf": [
+{
+"$ref": "#/definitions/DataLakeObjectCategory"
+},
+{
+"type": "null"
+}
+]
+},
 "+cluster_by": {
 "anyOf": [
 {
@@ -1828,6 +2146,16 @@
 "format": "uint64",
 "minimum": 0.0
 },
+"+imports": {
+"anyOf": [
+{
+"$ref": "#/definitions/StringOrArrayOfStrings"
+},
+{
+"type": "null"
+}
+]
+},
 "+include_full_name_in_path": {
 "type": [
 "boolean",
@@ -2089,6 +2417,12 @@
 "type": "string"
 }
 },
+"+primary_key": {
+"type": [
+"string",
+"null"
+]
+},
 "+query_tag": {
 "type": [
 "string",
@@ -2107,11 +2441,10 @@
 },
 "+refresh_interval_minutes": {
 "type": [
-"integer",
+"number",
 "null"
 ],
-"format": "uint64",
-"minimum": 0.0
+"format": "double"
 },
 "+refresh_mode": {
 "type": [
@@ -2125,6 +2458,15 @@
 "null"
 ]
 },
+"+resource_tags": {
+"type": [
+"object",
+"null"
+],
+"additionalProperties": {
+"type": "string"
+}
+},
 "+row_access_policy": {
 "type": [
 "string",
@@ -2278,13 +2620,13 @@
 "$ref": "#/definitions/ProjectModelConfig"
 }
 },
-"ProjectSavedQueriesConfig": {
+"ProjectSavedQueryConfig": {
 "type": "object",
 "properties": {
 "+cache": {
 "anyOf": [
 {
-"$ref": "#/definitions/SavedQueriesConfigCache"
+"$ref": "#/definitions/SavedQueryCache"
 },
 {
 "type": "null"
@@ -2340,12 +2682,21 @@
 }
 },
 "additionalProperties": {
-"$ref": "#/definitions/ProjectSavedQueriesConfig"
+"$ref": "#/definitions/ProjectSavedQueryConfig"
 }
 },
 "ProjectSeedConfig": {
 "type": "object",
 "properties": {
+"+adapter_properties": {
+"type": [
+"object",
+"null"
+],
+"additionalProperties": {
+"$ref": "#/definitions/AnyValue"
+}
+},
 "+alias": {
 "type": [
 "string",
@@ -2413,6 +2764,12 @@
 "null"
 ]
 },
+"+catalog_name": {
+"type": [
+"string",
+"null"
+]
+},
 "+cluster_by": {
 "anyOf": [
 {
@@ -2751,11 +3108,10 @@
 },
 "+refresh_interval_minutes": {
 "type": [
-"integer",
+"number",
 "null"
 ],
-"format": "uint64",
-"minimum": 0.0
+"format": "double"
 },
 "+refresh_mode": {
 "type": [
@@ -2900,6 +3256,27 @@
 "ProjectSemanticModelConfig": {
 "type": "object",
 "properties": {
+"+enabled": {
+"type": [
+"boolean",
+"null"
+]
+},
+"+group": {
+"type": [
+"string",
+"null"
+]
+},
+"+meta": {
+"type": [
+"object",
+"null"
+],
+"additionalProperties": {
+"$ref": "#/definitions/AnyValue"
+}
+},
 "+tags": {
 "anyOf": [
 {
@@ -2909,27 +3286,6 @@
 "type": "null"
 }
 ]
-},
-"enabled": {
-"type": [
-"boolean",
-"null"
-]
-},
-"group": {
-"type": [
-"string",
-"null"
-]
-},
-"meta": {
-"type": [
-"object",
-"null"
-],
-"additionalProperties": {
-"$ref": "#/definitions/AnyValue"
-}
 }
 },
 "additionalProperties": {
@@ -2939,6 +3295,15 @@
 "ProjectSnapshotConfig": {
 "type": "object",
 "properties": {
+"+adapter_properties": {
+"type": [
+"object",
+"null"
+],
+"additionalProperties": {
+"$ref": "#/definitions/AnyValue"
+}
+},
 "+alias": {
 "type": [
 "string",
@@ -3006,6 +3371,12 @@
 "null"
 ]
 },
+"+catalog_name": {
+"type": [
+"string",
+"null"
+]
+},
 "+check_cols": {
 "anyOf": [
 {
@@ -3365,11 +3736,10 @@
 },
 "+refresh_interval_minutes": {
 "type": [
-"integer",
+"number",
 "null"
 ],
-"format": "uint64",
-"minimum": 0.0
+"format": "double"
 },
 "+refresh_mode": {
 "type": [
@@ -3505,12 +3875,24 @@
 "null"
 ]
 },
+"+target_database": {
+"type": [
+"string",
+"null"
+]
+},
 "+target_lag": {
 "type": [
 "string",
 "null"
 ]
 },
+"+target_schema": {
+"type": [
+"string",
+"null"
+]
+},
 "+tblproperties": {
 "type": [
 "object",
@@ -3556,6 +3938,15 @@
 "ProjectSourceConfig": {
 "type": "object",
 "properties": {
+"+adapter_properties": {
+"type": [
+"object",
+"null"
+],
+"additionalProperties": {
+"$ref": "#/definitions/AnyValue"
+}
+},
 "+as_columnstore": {
 "type": [
 "boolean",
@@ -3617,6 +4008,12 @@
 "null"
 ]
 },
+"+catalog_name": {
+"type": [
+"string",
+"null"
+]
+},
 "+cluster_by": {
 "anyOf": [
 {
@@ -3889,11 +4286,10 @@
 },
 "+refresh_interval_minutes": {
 "type": [
-"integer",
+"number",
 "null"
 ],
-"format": "uint64",
-"minimum": 0.0
+"format": "double"
 },
 "+refresh_mode": {
 "type": [
@@ -4042,6 +4438,15 @@
 "ProjectUnitTestConfig": {
 "type": "object",
 "properties": {
+"+adapter_properties": {
+"type": [
+"object",
+"null"
+],
+"additionalProperties": {
+"$ref": "#/definitions/AnyValue"
+}
+},
 "+as_columnstore": {
 "type": [
 "boolean",
@@ -4103,6 +4508,12 @@
 "null"
 ]
 },
+"+catalog_name": {
+"type": [
+"string",
+"null"
+]
+},
 "+cluster_by": {
 "anyOf": [
 {
@@ -4337,11 +4748,10 @@
 },
 "+refresh_interval_minutes": {
 "type": [
-"integer",
+"number",
 "null"
 ],
-"format": "uint64",
-"minimum": 0.0
+"format": "double"
 },
 "+refresh_mode": {
 "type": [
@@ -4535,7 +4945,7 @@
 },
 "additionalProperties": false
 },
-"SavedQueriesConfigCache": {
+"SavedQueryCache": {
 "type": "object",
 "properties": {
 "enabled": {
@@ -4576,35 +4986,30 @@
 "type": "object",
 "properties": {
 "dbt_is_deleted": {
-"default": "DBT_IS_DELETED",
 "type": [
 "string",
 "null"
 ]
 },
 "dbt_scd_id": {
-"default": "DBT_SCD_ID",
 "type": [
 "string",
 "null"
 ]
 },
 "dbt_updated_at": {
-"default": "DBT_UPDATED_AT",
 "type": [
 "string",
 "null"
 ]
 },
 "dbt_valid_from": {
-"default": "DBT_VALID_FROM",
 "type": [
 "string",
 "null"
 ]
 },
 "dbt_valid_to": {
-"default": "DBT_VALID_TO",
 "type": [
 "string",
 "null"
@@ -4694,6 +5099,31 @@
 "all"
 ]
 },
+"Volatility": {
+"description": "Function volatility enum - defines the function's eligibility for certain optimizations Matches the Python Volatility enum from dbt-core",
+"oneOf": [
+{
+"type": "string",
+"enum": [
+"stable"
+]
+},
+{
+"description": "Deterministic - An deterministic function will always return the same output when given the same input.",
+"type": "string",
+"enum": [
+"deterministic"
+]
+},
+{
+"description": "NonDeterministic - A non-deterministic function may change the return value from evaluation to evaluation. Multiple invocations of a non-deterministic function may return different results when used in the same query.",
+"type": "string",
+"enum": [
+"non-deterministic"
+]
+}
+]
+},
 "_Dispatch": {
 "type": "object",
 "required": [
File diff suppressed because it is too large
@@ -1,4 +1,5 @@
 from dbt.artifacts.resources.types import NodeType
+from dbt.contracts.files import SourceFile
 from dbt.contracts.graph.nodes import FunctionNode, ManifestNode
 from dbt.parser.base import SimpleParser
 from dbt.parser.search import FileBlock
@@ -21,10 +22,12 @@ class FunctionParser(SimpleParser[FileBlock, FunctionNode]):
 # overrides SimpleSQLParser.add_result_node
 def add_result_node(self, block: FileBlock, node: ManifestNode):
 assert isinstance(node, FunctionNode), "Got non FunctionNode in FunctionParser"
+file = block.file
+assert isinstance(file, SourceFile)
 if node.config.enabled:
-self.manifest.add_function(node)
+self.manifest.add_function(file, node)
 else:
-self.manifest.add_disabled(block.file, node)
+self.manifest.add_disabled(file, node)

 def parse_file(self, file_block: FileBlock) -> None:
 self.parse_node(file_block)
@@ -90,6 +90,7 @@ from dbt.events.types import (
 )
 from dbt.exceptions import (
 AmbiguousAliasError,
+DuplicateResourceNameError,
 InvalidAccessTypeError,
 TargetNotFoundError,
 scrub_secrets,
@@ -525,6 +526,7 @@ class ManifestLoader:
 self.check_for_microbatch_deprecations()
 self.check_forcing_batch_concurrency()
 self.check_microbatch_model_has_a_filtered_input()
+self.check_function_default_arguments_ordering()

 return self.manifest

@@ -536,6 +538,9 @@ class ManifestLoader:
 self.skip_parsing = self.partial_parser.skip_parsing()
 if self.skip_parsing:
 # nothing changed, so we don't need to generate project_parser_files
+fire_event(
+Note(msg="Nothing changed, skipping partial parsing."), level=EventLevel.DEBUG
+)
 self.manifest = self.saved_manifest # type: ignore[assignment]
 else:
 # create child_map and parent_map
@@ -1547,6 +1552,17 @@ class ManifestLoader:
 if not has_input_with_event_time_config:
 fire_event(MicrobatchModelNoEventTimeInputs(model_name=node.name))

+def check_function_default_arguments_ordering(self):
+for function in self.manifest.functions.values():
+found_default_value = False
+for argument in function.arguments:
+if not found_default_value and argument.default_value is not None:
+found_default_value = True
+elif found_default_value and argument.default_value is None:
+raise dbt.exceptions.ParsingError(
+f"Non-defaulted argument '{argument.name}' of function '{function.name}' comes after a defaulted argument. Non-defaulted arguments cannot come after defaulted arguments. "
+)
+
 def write_perf_info(self, target_path: str):
 path = os.path.join(target_path, PERF_INFO_FILE_NAME)
 write_file(path, json.dumps(self._perf_info, cls=dbt.utils.JSONEncoder, indent=4))
@@ -1635,12 +1651,26 @@ def _check_resource_uniqueness(
 alias_resources: Dict[str, ManifestNode] = {}
 name_resources: Dict[str, Dict] = {}

-for resource, node in manifest.nodes.items():
+for _, node in manifest.nodes.items():
 if not node.is_relational:
 continue

 if node.package_name not in name_resources:
 name_resources[node.package_name] = {"ver": {}, "unver": {}}

+existing_unversioned_node = name_resources[node.package_name]["unver"].get(node.name)
+if existing_unversioned_node is not None and not node.is_versioned:
+if get_flags().require_unique_project_resource_names:
+raise DuplicateResourceNameError(existing_unversioned_node, node)
+else:
+dbt.deprecations.warn(
+"duplicate-name-distinct-node-types-deprecation",
+resource_name=node.name,
+unique_id1=existing_unversioned_node.unique_id,
+unique_id2=node.unique_id,
+package_name=node.package_name,
+)
+
 if node.is_versioned:
 name_resources[node.package_name]["ver"][node.name] = node
 else:
@@ -11,7 +11,13 @@ from dbt.contracts.files import (
 parse_file_type_to_parser,
 )
 from dbt.contracts.graph.manifest import Manifest
-from dbt.contracts.graph.nodes import AnalysisNode, ModelNode, SeedNode, SnapshotNode
+from dbt.contracts.graph.nodes import (
+AnalysisNode,
+GenericTestNode,
+ModelNode,
+SeedNode,
+SnapshotNode,
+)
 from dbt.events.types import PartialParsingEnabled, PartialParsingFile
 from dbt.node_types import NodeType
 from dbt_common.context import get_invocation_context
@@ -58,6 +64,7 @@ special_override_macros = [
 "generate_schema_name",
 "generate_database_name",
 "generate_alias_name",
+"function",
 ]


@@ -295,6 +302,10 @@ class PartialParsing:
 if saved_source_file.parse_file_type == ParseFileType.Fixture:
 self.delete_fixture_node(saved_source_file)

+# functions
+if saved_source_file.parse_file_type == ParseFileType.Function:
+self.delete_function_node(saved_source_file)
+
 fire_event(PartialParsingFile(operation="deleted", file_id=file_id))

 # Updates for non-schema files
@@ -310,6 +321,8 @@ class PartialParsing:
 self.update_doc_in_saved(new_source_file, old_source_file)
 elif new_source_file.parse_file_type == ParseFileType.Fixture:
 self.update_fixture_in_saved(new_source_file, old_source_file)
+elif new_source_file.parse_file_type == ParseFileType.Function:
+self.update_function_in_saved(new_source_file, old_source_file)
 else:
 raise Exception(f"Invalid parse_file_type in source_file {file_id}")
 fire_event(PartialParsingFile(operation="updated", file_id=file_id))
@@ -405,6 +418,15 @@ class PartialParsing:
 self.saved_files[new_source_file.file_id] = deepcopy(new_source_file)
 self.add_to_pp_files(new_source_file)

+def update_function_in_saved(
+self, new_source_file: SourceFile, old_source_file: SourceFile
+) -> None:
+if self.already_scheduled_for_parsing(old_source_file):
+return
+self.delete_function_node(old_source_file)
+self.saved_files[new_source_file.file_id] = deepcopy(new_source_file)
+self.add_to_pp_files(new_source_file)
+
 def remove_mssat_file(self, source_file: AnySourceFile):
 # nodes [unique_ids] -- SQL files
 # There should always be a node for a SQL file
@@ -630,6 +652,42 @@ class PartialParsing:
 source_file.unit_tests.remove(unique_id)
 self.saved_manifest.files.pop(source_file.file_id)

+def delete_function_node(self, source_file: SourceFile) -> None:
+# There should always be a node for a Function file
+if not isinstance(source_file, SourceFile) or not source_file.functions:
+return
+
+# There can only be one node of a function
+function_unique_id = source_file.functions[0]
+
+# Remove the function node from the saved manifest
+function_node = self.saved_manifest.functions.pop(function_unique_id)
+
+# Remove the function node from the source file so that it's not viewed as a
+# duplicate when it's re-added
+source_file.functions.remove(function_unique_id)
+
+# If this function had a schema patch, schedule that schema element to be reapplied.
+patch_path = function_node.patch_path
+if (
+patch_path is not None
+and patch_path in self.saved_files
+and patch_path not in self.file_diff["deleted_schema_files"]
+):
+schema_file = self.saved_files[patch_path]
+# Only proceed if this is a schema file
+if isinstance(schema_file, SchemaSourceFile):
+elements = schema_file.dict_from_yaml.get("functions", [])
+schema_element = self.get_schema_element(elements, function_node.name)
+if schema_element:
+# Remove any previous links and re-merge the patch to pp_dict so it gets reparsed
+self.delete_schema_function(schema_file, schema_element)
+self.merge_patch(schema_file, "functions", schema_element)
+
+# Finally, remove the deleted function file from saved files
+if source_file.file_id in self.saved_manifest.files:
+self.saved_manifest.files.pop(source_file.file_id)
+
 # Schema files -----------------------
 # Changed schema files
 def change_schema_file(self, file_id):
@@ -744,6 +802,7 @@ class PartialParsing:
 handle_change("unit_tests", self.delete_schema_unit_test)
 handle_change("saved_queries", self.delete_schema_saved_query)
 handle_change("data_tests", self.delete_schema_data_test_patch)
+handle_change("functions", self.delete_schema_function)

 def _handle_element_change(
 self, schema_file, saved_yaml_dict, new_yaml_dict, env_var_changes, dict_key: str, delete
@@ -917,7 +976,7 @@ class PartialParsing:
 for child_id in self.saved_manifest.child_map[unique_id]:
 if child_id.startswith("test") and child_id in self.saved_manifest.nodes:
 child_test = self.saved_manifest.nodes[child_id]
-if child_test.attached_node:
+if isinstance(child_test, GenericTestNode) and child_test.attached_node:
 if child_test.attached_node in self.saved_manifest.nodes:
 attached_node = self.saved_manifest.nodes[child_test.attached_node]
 self.update_in_saved(attached_node.file_id)
@@ -1080,6 +1139,24 @@ class PartialParsing:
 schema_file.unit_tests.remove(unique_id)
 # No disabled unit tests yet

+def delete_schema_function(self, schema_file: SchemaSourceFile, function_dict: dict) -> None:
+function_name = function_dict["name"]
+functions = schema_file.node_patches.copy()
+for unique_id in functions:
+if unique_id in self.saved_manifest.functions:
+function = self.saved_manifest.functions[unique_id]
+if function.name == function_name:
+removed_function = self.saved_manifest.functions.pop(unique_id)
+# For schema patches, recorded unique_ids live in node_patches (ndp)
+if unique_id in schema_file.node_patches:
+schema_file.node_patches.remove(unique_id)
+# Schedule the function's SQL file for reparsing so the node is re-added
+file_id = removed_function.file_id
+if file_id and file_id in self.new_files:
+self.saved_files[file_id] = deepcopy(self.new_files[file_id])
+if file_id and file_id in self.saved_files:
+self.add_to_pp_files(self.saved_files[file_id])
+
 def get_schema_element(self, elem_list, elem_name):
 for element in elem_list:
 if "name" in element and element["name"] == elem_name:
@@ -10,7 +10,7 @@ from dbt import utils
 from dbt.artifacts.resources import ModelConfig, UnitTestConfig, UnitTestFormat
 from dbt.config import RuntimeConfig
 from dbt.context.context_config import ContextConfig
-from dbt.context.providers import generate_parse_exposure, get_rendered
+from dbt.context.providers import generate_parser_unit_test_context, get_rendered
 from dbt.contracts.files import FileHash, SchemaSourceFile
 from dbt.contracts.graph.manifest import Manifest
 from dbt.contracts.graph.model_config import UnitTestNodeConfig
@@ -100,12 +100,7 @@ class UnitTestManifestLoader:
             overrides=test_case.overrides,
         )

-        ctx = generate_parse_exposure(
-            unit_test_node,  # type: ignore
-            self.root_project,
-            self.manifest,
-            test_case.package_name,
-        )
+        ctx = generate_parser_unit_test_context(unit_test_node, self.root_project, self.manifest)
         get_rendered(unit_test_node.raw_code, ctx, unit_test_node, capture_macros=True)
         # unit_test_node now has a populated refs/sources
@@ -173,6 +168,10 @@ class UnitTestManifestLoader:
                 **common_fields,
                 source_name=original_input_node.source_name,  # needed for source lookup
             )
+            # In the case of multiple sources with the same name, we add the source schema name to the unique id.
+            # This additionally prevents duplicate CTE names during compilation.
+            input_node.unique_id = f"model.{original_input_node.package_name}.{original_input_node.source_name}__{input_name}"
+
             # Sources need to go in the sources dictionary in order to create the right lookup
             self.unit_test_manifest.sources[input_node.unique_id] = input_node  # type: ignore
@@ -291,8 +290,11 @@ class UnitTestParser(YamlReader):
         )

         if tested_model_node:
-            unit_test_definition.depends_on.nodes.append(tested_model_node.unique_id)
-            unit_test_definition.schema = tested_model_node.schema
+            if tested_model_node.config.enabled:
+                unit_test_definition.depends_on.nodes.append(tested_model_node.unique_id)
+                unit_test_definition.schema = tested_model_node.schema
+            else:
+                unit_test_definition.config.enabled = False

         # Check that format and type of rows matches for each given input,
         # convert rows to a list of dictionaries, and add the unique_id of
@@ -303,7 +305,7 @@ class UnitTestParser(YamlReader):
         # for calculating state:modified
         unit_test_definition.build_unit_test_checksum()
         assert isinstance(self.yaml.file, SchemaSourceFile)
-        if unit_test_config.enabled:
+        if unit_test_definition.config.enabled:
             self.manifest.add_unit_test(self.yaml.file, unit_test_definition)
         else:
             self.manifest.add_disabled(self.yaml.file, unit_test_definition)
@@ -493,6 +495,13 @@ def find_tested_model_node(
     model_version = model_name_split[1] if len(model_name_split) == 2 else None

     tested_node = manifest.ref_lookup.find(model_name, current_project, model_version, manifest)
+    if not tested_node:
+        disabled_node = manifest.disabled_lookup.find(
+            model_name, current_project, model_version, [NodeType.Model]
+        )
+        if disabled_node:
+            tested_node = disabled_node[0]
+
     return tested_node
@@ -510,22 +519,36 @@ def process_models_for_unit_test(
             f"Unable to find model '{current_project}.{unit_test_def.model}' for "
            f"unit test '{unit_test_def.name}' in {unit_test_def.original_file_path}"
        )
-    unit_test_def.depends_on.nodes.append(tested_node.unique_id)
-    unit_test_def.schema = tested_node.schema
+    if tested_node.config.enabled:
+        unit_test_def.depends_on.nodes.append(tested_node.unique_id)
+        unit_test_def.schema = tested_node.schema
+    else:
+        # If the model is disabled, the unit test should be disabled
+        unit_test_def.config.enabled = False

     # The UnitTestDefinition should only have one "depends_on" at this point,
     # the one that's found by the "model" field.
     target_model_id = unit_test_def.depends_on.nodes[0]
     if target_model_id not in manifest.nodes:
         if target_model_id in manifest.disabled:
-            # The model is disabled, so we don't need to do anything (#10540)
-            return
+            # If the model is disabled, the unit test should be disabled
+            unit_test_def.config.enabled = False
         else:
             # If we've reached here and the model is not disabled, throw an error
             raise ParsingError(
                 f"Unit test '{unit_test_def.name}' references a model that does not exist: {target_model_id}"
             )

+    if not unit_test_def.config.enabled:
+        # Ensure the unit test is disabled in the manifest
+        if unit_test_def.unique_id in manifest.unit_tests:
+            manifest.unit_tests.pop(unit_test_def.unique_id)
+        if unit_test_def.unique_id not in manifest.disabled:
+            manifest.add_disabled(manifest.files[unit_test_def.file_id], unit_test_def)
+
+        # The unit test is disabled, so we don't need to do any further processing (#10540)
+        return
+
     target_model = manifest.nodes[target_model_id]
     assert isinstance(target_model, ModelNode)
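The parser-side rule introduced above is simply that a disabled model cascades to any unit test defined against it. A small sketch of that cascade, with SimpleNamespace stand-ins for the real node and config objects (the names below are illustrative, not dbt internals):

from types import SimpleNamespace

def attach_tested_model(unit_test, tested_model):
    # Mirror of the rule above: only enabled models contribute schema and deps.
    if tested_model.config.enabled:
        unit_test.depends_on.append(tested_model.unique_id)
        unit_test.schema = tested_model.schema
    else:
        # Disabling the model cascades to the unit test definition.
        unit_test.config.enabled = False

model = SimpleNamespace(
    unique_id="model.my_project.orders",
    schema="analytics",
    config=SimpleNamespace(enabled=False),
)
unit_test = SimpleNamespace(depends_on=[], schema=None, config=SimpleNamespace(enabled=True))

attach_tested_model(unit_test, model)
assert unit_test.config.enabled is False  # the unit test is disabled along with its model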
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
-from typing import Dict, List, Optional, Set, Type
+from typing import Dict, Iterable, List, Optional, Set, Type

+from dbt.adapters.base import BaseRelation
 from dbt.artifacts.schemas.results import NodeStatus
 from dbt.artifacts.schemas.run import RunResult
 from dbt.cli.flags import Flags
@@ -64,6 +65,22 @@ class BuildTask(RunTask):
             resource_types.remove(NodeType.Unit)
         return list(resource_types)

+    def get_model_schemas(self, adapter, selected_uids: Iterable[str]) -> Set[BaseRelation]:
+
+        # Get model schemas as usual
+        model_schemas = super().get_model_schemas(adapter, selected_uids)
+
+        # Get function schemas
+        function_schemas: Set[BaseRelation] = set()
+        for function in (
+            self.manifest.functions.values() if self.manifest else []
+        ):  # functionally the manifest will never be None as we do an assert in super().get_model_schemas(...)
+            if function.unique_id in selected_uids:
+                relation = adapter.Relation.create_from(self.config, function)
+                function_schemas.add(relation.without_identifier())
+
+        return model_schemas.union(function_schemas)
+
     # overrides get_graph_queue in runnable.py
     def get_graph_queue(self) -> GraphQueue:
         # Following uses self.selection_arg and self.exclusion_arg
@@ -72,12 +89,14 @@ class BuildTask(RunTask):
         # selector including unit tests
         full_selector = self.get_node_selector(no_unit_tests=False)
         # selected node unique_ids with unit_tests
-        full_selected_nodes = full_selector.get_selected(spec)
+        full_selected_nodes = full_selector.get_selected(spec=spec, warn_on_no_nodes=False)

         # This selector removes the unit_tests from the selector
         selector_wo_unit_tests = self.get_node_selector(no_unit_tests=True)
         # selected node unique_ids without unit_tests
-        selected_nodes_wo_unit_tests = selector_wo_unit_tests.get_selected(spec)
+        selected_nodes_wo_unit_tests = selector_wo_unit_tests.get_selected(
+            spec=spec, warn_on_no_nodes=False
+        )

         # Get the difference in the sets of nodes with and without unit tests and
         # save it
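The new get_model_schemas override simply unions the schemas of selected functions with the usual model schemas. A toy sketch of that union, with (database, schema) tuples standing in for adapter Relation objects (the helper and sample ids below are hypothetical):

from typing import Iterable, Set, Tuple

Relation = Tuple[str, str]  # (database, schema) -- identifier already stripped

def merge_schemas(model_schemas: Set[Relation],
                  functions: Iterable[dict],
                  selected_uids: Set[str]) -> Set[Relation]:
    # Collect the schema of every selected function, then union with model schemas.
    function_schemas: Set[Relation] = {
        (f["database"], f["schema"])
        for f in functions
        if f["unique_id"] in selected_uids
    }
    return model_schemas | function_schemas

models = {("analytics", "marts")}
functions = [{"unique_id": "function.proj.slugify", "database": "analytics", "schema": "udfs"}]
print(merge_schemas(models, functions, {"function.proj.slugify"}))
# {('analytics', 'marts'), ('analytics', 'udfs')}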
|
||||||
|
|||||||
@@ -119,23 +119,97 @@ class DepsTask(BaseTask):
             )

    def check_for_duplicate_packages(self, packages_yml):
-        """Loop through contents of `packages.yml` to ensure no duplicate package names + versions.
+        """Loop through contents of `packages.yml` to remove entries that match the package being added.

-        This duplicate check will take into consideration exact match of a package name, as well as
-        a check to see if a package name exists within a name (i.e. a package name inside a git URL).
+        This method is called only during `dbt deps --add-package` to check if the package
+        being added already exists in packages.yml. It uses substring matching to identify
+        duplicates, which means it will match across different package sources. For example,
+        adding a hub package "dbt-labs/dbt_utils" will remove an existing git package
+        "https://github.com/dbt-labs/dbt-utils.git" since both contain "dbt_utils" or "dbt-utils".
+
+        The matching is flexible to handle both underscore and hyphen variants of package names,
+        as git repos often use hyphens (dbt-utils) while package names use underscores (dbt_utils).
+        Word boundaries (/, .) are enforced to prevent false matches like "dbt-core" matching
+        "dbt-core-utils".

         Args:
            packages_yml (dict): In-memory read of `packages.yml` contents

         Returns:
-           dict: Updated or untouched packages_yml contents
+           dict: Updated packages_yml contents with matching packages removed
        """
-        for i, pkg_entry in enumerate(packages_yml["packages"]):
-            for val in pkg_entry.values():
-                if self.args.add_package["name"] in val:
-                    del packages_yml["packages"][i]
-                    fire_event(DepsFoundDuplicatePackage(removed_package=pkg_entry))
+        # Extract the package name for matching
+        package_name = self.args.add_package["name"]
+
+        # Create variants for flexible matching (handle _ vs -)
+        # Check multiple variants to handle naming inconsistencies between hub and git
+        package_name_parts = [
+            package_name,  # Original: "dbt-labs/dbt_utils"
+            package_name.replace("_", "-"),  # Hyphens: "dbt-labs/dbt-utils"
+            package_name.replace("-", "_"),  # Underscores: "dbt_labs/dbt_utils"
+        ]
+        # Extract just the package name without org (after last /)
+        if "/" in package_name:
+            short_name = package_name.split("/")[-1]
+            package_name_parts.extend(
+                [
+                    short_name,  # "dbt_utils"
+                    short_name.replace("_", "-"),  # "dbt-utils"
+                    short_name.replace("-", "_"),  # "dbt_utils" (deduplicated)
+                ]
+            )
+
+        # Remove duplicates from package_name_parts
+        package_name_parts = list(set(package_name_parts))
+
+        # Iterate backwards to safely delete items without index shifting issues
+        for i in range(len(packages_yml["packages"]) - 1, -1, -1):
+            pkg_entry = packages_yml["packages"][i]
+
+            # Get the package identifier key (package type determines which key exists)
+            # This avoids iterating over non-string values like warn-unpinned: false
+            package_identifier = (
+                pkg_entry.get("package")  # hub/registry package
+                or pkg_entry.get("git")  # git package
+                or pkg_entry.get("local")  # local package
+                or pkg_entry.get("tarball")  # tarball package
+                or pkg_entry.get("private")  # private package
+            )
+
+            # Check if any variant of the package name appears in the identifier
+            # Use word boundaries to avoid false matches (e.g., "dbt-core" shouldn't match "dbt-core-utils")
+            # Word boundaries are: start/end of string, /, or .
+            # Note: - and _ are NOT boundaries since they're used within compound package names
+            if package_identifier:
+                is_duplicate = False
+                for name_variant in package_name_parts:
+                    if name_variant in package_identifier:
+                        # Found a match, now verify it's not a substring of a larger word
+                        # Check characters before and after the match
+                        idx = package_identifier.find(name_variant)
+                        start_ok = idx == 0 or package_identifier[idx - 1] in "/."
+                        end_idx = idx + len(name_variant)
+                        end_ok = (
+                            end_idx == len(package_identifier)
+                            or package_identifier[end_idx] in "/."
+                        )
+
+                        if start_ok and end_ok:
+                            is_duplicate = True
+                            break
+
+                if is_duplicate:
+                    del packages_yml["packages"][i]
+                    # Filter out non-string values (like warn-unpinned boolean) before logging
+                    # Note: Check for bool first since bool is a subclass of int in Python
+                    loggable_package = {
+                        k: v
+                        for k, v in pkg_entry.items()
+                        if not isinstance(v, bool)
+                        and isinstance(v, (str, int, float))
+                        and k != "unrendered"
+                    }
+                    fire_event(DepsFoundDuplicatePackage(removed_package=loggable_package))

         return packages_yml
||||||
|
|
||||||
|
|||||||
@@ -126,6 +126,7 @@ class RetryTask(ConfiguredTask):
|
|||||||
result.unique_id
|
result.unique_id
|
||||||
for result in self.previous_results.results
|
for result in self.previous_results.results
|
||||||
if result.status in RETRYABLE_STATUSES
|
if result.status in RETRYABLE_STATUSES
|
||||||
|
# Avoid retrying operation nodes unless we are retrying the run-operation command
|
||||||
and not (
|
and not (
|
||||||
self.previous_command_name != "run-operation"
|
self.previous_command_name != "run-operation"
|
||||||
and result.unique_id.startswith("operation.")
|
and result.unique_id.startswith("operation.")
|
||||||
@@ -150,6 +151,11 @@ class RetryTask(ConfiguredTask):
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Tasks without get_graph_queue (e.g. run-operation) and no failed nodes to retry.
|
||||||
|
if not unique_ids and not hasattr(self.task_class, "get_graph_queue"):
|
||||||
|
# Return early with the previous results as the past invocation was successful
|
||||||
|
return self.previous_results
|
||||||
|
|
||||||
class TaskWrapper(self.task_class):
|
class TaskWrapper(self.task_class):
|
||||||
def get_graph_queue(self):
|
def get_graph_queue(self):
|
||||||
new_graph = self.graph.get_subset_graph(unique_ids)
|
new_graph = self.graph.get_subset_graph(unique_ids)
|
||||||
|
|||||||
@@ -3,14 +3,13 @@ import importlib
 import importlib.util
 import json
 import os
-import re
 from importlib import metadata as importlib_metadata
-from pathlib import Path
 from typing import Iterator, List, Optional, Tuple

 import requests

 import dbt_common.semver as semver
+from dbt.__version__ import version as __version_string
 from dbt_common.ui import green, yellow

 PYPI_VERSION_URL = "https://pypi.org/pypi/dbt-core/json"
@@ -233,16 +232,8 @@ def _resolve_version() -> str:
     try:
         return importlib_metadata.version("dbt-core")
     except importlib_metadata.PackageNotFoundError:
-        pyproject_path = Path(__file__).resolve().parents[1] / "pyproject.toml"
-        if not pyproject_path.exists():
-            raise RuntimeError("Unable to locate pyproject.toml to determine dbt-core version")
-
-        text = pyproject_path.read_text(encoding="utf-8")
-        match = re.search(r'^version\s*=\s*"(?P<version>[^"]+)"', text, re.MULTILINE)
-        if match:
-            return match.group("version")
-
-        raise RuntimeError("Unable to determine dbt-core version from pyproject.toml")
+        # When running from source (not installed), use version from __version__.py
+        return __version_string


 __version__ = _resolve_version()
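The fallback pattern above, reduced to its core: prefer the installed package metadata and only fall back to the in-repo version string when dbt-core is not installed. A minimal sketch (the fallback default here is a placeholder, not the real value):

from importlib import metadata

def resolve_version(fallback: str = "0.0.0+source") -> str:
    try:
        return metadata.version("dbt-core")
    except metadata.PackageNotFoundError:
        # Not installed (e.g. running from a source checkout): use the module constant.
        return fallback

print(resolve_version())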
|
||||||
|
|||||||
206
core/hatch.toml
Normal file
@@ -0,0 +1,206 @@
|
|||||||
|
[version]
|
||||||
|
path = "dbt/__version__.py"
|
||||||
|
|
||||||
|
[build.targets.wheel]
|
||||||
|
packages = ["dbt"]
|
||||||
|
only-packages = true
|
||||||
|
exclude = [
|
||||||
|
"**/*.md",
|
||||||
|
]
|
||||||
|
artifacts = [
|
||||||
|
"dbt/include/**/*.py",
|
||||||
|
"dbt/include/**/*.sql",
|
||||||
|
"dbt/include/**/*.yml",
|
||||||
|
"dbt/include/**/*.html",
|
||||||
|
"dbt/include/**/*.md",
|
||||||
|
"dbt/include/**/.gitkeep",
|
||||||
|
"dbt/include/**/.gitignore",
|
||||||
|
"dbt/task/docs/**/*.html",
|
||||||
|
"dbt/jsonschemas/**/*.json",
|
||||||
|
"dbt/py.typed",
|
||||||
|
# Directories without __init__.py (namespace packages)
|
||||||
|
"dbt/artifacts/resources/v1/**/*.py",
|
||||||
|
"dbt/artifacts/utils/**/*.py",
|
||||||
|
"dbt/event_time/**/*.py",
|
||||||
|
"dbt/docs/source/**/*.py",
|
||||||
|
"dbt/tests/util.py",
|
||||||
|
]
|
||||||
|
|
||||||
|
[build.targets.sdist]
|
||||||
|
include = [
|
||||||
|
"/dbt",
|
||||||
|
"/README.md",
|
||||||
|
]
|
||||||
|
|
||||||
|
[build.targets.sdist.force-include]
|
||||||
|
"dbt/task/docs/index.html" = "dbt/task/docs/index.html"
|
||||||
|
|
||||||
|
[envs.default]
|
||||||
|
# Python 3.10-3.11 required locally due to flake8==4.0.1 compatibility
|
||||||
|
# CI uses [envs.ci] which doesn't set python, allowing matrix testing
|
||||||
|
python = "3.11"
|
||||||
|
dependencies = [
|
||||||
|
# Git dependencies for development against main branches
|
||||||
|
"dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-adapters",
|
||||||
|
"dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-tests-adapter",
|
||||||
|
"dbt-common @ git+https://github.com/dbt-labs/dbt-common.git@main",
|
||||||
|
"dbt-postgres @ git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-postgres",
|
||||||
|
# Code quality
|
||||||
|
"pre-commit~=3.7.0",
|
||||||
|
"black>=24.3,<25.0",
|
||||||
|
"flake8==4.0.1", # requires python <3.12
|
||||||
|
"mypy==1.4.1", # update requires code fixes
|
||||||
|
"isort==5.13.2",
|
||||||
|
# Testing
|
||||||
|
"pytest>=7.0,<8.0",
|
||||||
|
"pytest-xdist~=3.6",
|
||||||
|
"pytest-csv~=3.0",
|
||||||
|
"pytest-cov",
|
||||||
|
"pytest-dotenv",
|
||||||
|
"pytest-mock",
|
||||||
|
"pytest-split",
|
||||||
|
"pytest-logbook~=1.2",
|
||||||
|
"logbook<1.9",
|
||||||
|
"flaky",
|
||||||
|
"freezegun>=1.5.1",
|
||||||
|
"hypothesis",
|
||||||
|
"mocker",
|
||||||
|
# Debugging
|
||||||
|
"ipdb",
|
||||||
|
"ddtrace==2.21.3",
|
||||||
|
# Documentation
|
||||||
|
"docutils",
|
||||||
|
"sphinx",
|
||||||
|
# Type stubs
|
||||||
|
"types-docutils",
|
||||||
|
"types-PyYAML",
|
||||||
|
"types-Jinja2",
|
||||||
|
"types-jsonschema",
|
||||||
|
"types-mock",
|
||||||
|
"types-protobuf>=5.0,<6.0",
|
||||||
|
"types-python-dateutil",
|
||||||
|
"types-pytz",
|
||||||
|
"types-requests",
|
||||||
|
"types-setuptools",
|
||||||
|
# Other
|
||||||
|
"pip-tools",
|
||||||
|
"protobuf>=6.0,<7.0",
|
||||||
|
]
|
||||||
|
|
||||||
|
[envs.default.scripts]
|
||||||
|
# Setup commands
|
||||||
|
setup = [
|
||||||
|
"pip install -e .",
|
||||||
|
"pre-commit install",
|
||||||
|
]
|
||||||
|
|
||||||
|
# Code quality commands
|
||||||
|
code-quality = "pre-commit run --all-files --show-diff-on-failure"
|
||||||
|
lint = [
|
||||||
|
"pre-commit run flake8-check --hook-stage manual --all-files",
|
||||||
|
"pre-commit run mypy-check --hook-stage manual --all-files",
|
||||||
|
]
|
||||||
|
flake8 = "pre-commit run flake8-check --hook-stage manual --all-files"
|
||||||
|
mypy = "pre-commit run mypy-check --hook-stage manual --all-files"
|
||||||
|
black = "pre-commit run black-check --hook-stage manual --all-files"
|
||||||
|
|
||||||
|
# Testing commands
|
||||||
|
unit-tests = "python -m pytest {args} ../tests/unit"
|
||||||
|
integration-tests = "python -m pytest -nauto {args} ../tests/functional"
|
||||||
|
integration-tests-fail-fast = "python -m pytest -x -nauto {args} ../tests/functional"
|
||||||
|
test = [
|
||||||
|
"python -m pytest ../tests/unit",
|
||||||
|
"pre-commit run black-check --hook-stage manual --all-files",
|
||||||
|
"pre-commit run flake8-check --hook-stage manual --all-files",
|
||||||
|
"pre-commit run mypy-check --hook-stage manual --all-files",
|
||||||
|
]
|
||||||
|
|
||||||
|
# Database setup
|
||||||
|
setup-db = [
|
||||||
|
"docker compose up -d database",
|
||||||
|
"bash ../test/setup_db.sh",
|
||||||
|
]
|
||||||
|
|
||||||
|
# Utility commands
|
||||||
|
clean = [
|
||||||
|
"rm -f .coverage",
|
||||||
|
"rm -f .coverage.*",
|
||||||
|
"rm -rf .eggs/",
|
||||||
|
"rm -rf build/",
|
||||||
|
"rm -rf dbt.egg-info/",
|
||||||
|
"rm -f dbt_project.yml",
|
||||||
|
"rm -rf dist/",
|
||||||
|
"find . -type f -name '*.pyc' -delete",
|
||||||
|
"find . -type d -name __pycache__ -exec rm -rf {} +",
|
||||||
|
]
|
||||||
|
json-schema = "python ../scripts/collect-artifact-schema.py --path ../schemas"
|
||||||
|
|
||||||
|
[envs.build]
|
||||||
|
python = "3.11"
|
||||||
|
detached = true
|
||||||
|
dependencies = [
|
||||||
|
"wheel",
|
||||||
|
"twine",
|
||||||
|
"check-wheel-contents",
|
||||||
|
]
|
||||||
|
|
||||||
|
[envs.build.scripts]
|
||||||
|
check-all = [
|
||||||
|
"- check-wheel",
|
||||||
|
"- check-sdist",
|
||||||
|
]
|
||||||
|
check-wheel = [
|
||||||
|
"twine check dist/*",
|
||||||
|
"find ./dist/dbt_core-*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/",
|
||||||
|
"pip freeze | grep dbt-core",
|
||||||
|
"dbt --version",
|
||||||
|
]
|
||||||
|
check-sdist = [
|
||||||
|
"check-wheel-contents dist/*.whl --ignore W007,W008",
|
||||||
|
"find ./dist/dbt_core-*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/",
|
||||||
|
"pip freeze | grep dbt-core",
|
||||||
|
"dbt --version",
|
||||||
|
]
|
||||||
|
|
||||||
|
# CI environment - isolated environment with test dependencies
|
||||||
|
[envs.ci]
|
||||||
|
dependencies = [
|
||||||
|
# Git dependencies for development against main branches
|
||||||
|
"dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-adapters",
|
||||||
|
"dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-tests-adapter",
|
||||||
|
"dbt-common @ git+https://github.com/dbt-labs/dbt-common.git@main",
|
||||||
|
"dbt-postgres @ git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-postgres",
|
||||||
|
# Testing
|
||||||
|
"pytest>=7.0,<8.0",
|
||||||
|
"pytest-cov",
|
||||||
|
"pytest-xdist~=3.6",
|
||||||
|
"pytest-csv~=3.0",
|
||||||
|
"pytest-dotenv",
|
||||||
|
"pytest-mock",
|
||||||
|
"pytest-split",
|
||||||
|
"ddtrace==2.21.3",
|
||||||
|
"flaky",
|
||||||
|
"freezegun>=1.5.1",
|
||||||
|
"hypothesis",
|
||||||
|
]
|
||||||
|
|
||||||
|
pre-install-commands = [
|
||||||
|
"pip install -e .",
|
||||||
|
]
|
||||||
|
|
||||||
|
[envs.ci.env-vars]
|
||||||
|
DBT_TEST_USER_1 = "dbt_test_user_1"
|
||||||
|
DBT_TEST_USER_2 = "dbt_test_user_2"
|
||||||
|
DBT_TEST_USER_3 = "dbt_test_user_3"
|
||||||
|
|
||||||
|
[envs.ci.scripts]
|
||||||
|
unit-tests = "python -m pytest --cov=dbt --cov-report=xml {args} ../tests/unit"
|
||||||
|
# Run as single command to avoid pre-install-commands running twice
|
||||||
|
integration-tests = """
|
||||||
|
python -m pytest --cov=dbt --cov-append --cov-report=xml {args} ../tests/functional -k "not tests/functional/graph_selection" && \
|
||||||
|
python -m pytest --cov=dbt --cov-append --cov-report=xml {args} ../tests/functional/graph_selection
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Note: Python version matrix is handled by GitHub Actions CI, not hatch.
|
||||||
|
# This avoids running tests 4x per job. The CI sets up the Python version
|
||||||
|
# and hatch uses whatever Python is active.
|
||||||
@@ -1,38 +1,12 @@
|
|||||||
[tool.setuptools]
|
|
||||||
package-dir = {"" = "."}
|
|
||||||
include-package-data = true
|
|
||||||
zip-safe = false
|
|
||||||
|
|
||||||
[tool.setuptools.packages.find]
|
|
||||||
where = ["."]
|
|
||||||
include = [
|
|
||||||
"dbt",
|
|
||||||
"dbt.*",
|
|
||||||
]
|
|
||||||
|
|
||||||
# this needs to match MANIFEST.in for the wheels
|
|
||||||
[tool.setuptools.package-data]
|
|
||||||
"dbt" = [
|
|
||||||
"include/**/*.py",
|
|
||||||
"include/**/*.sql",
|
|
||||||
"include/**/*.yml",
|
|
||||||
"include/**/*.html",
|
|
||||||
"include/**/*.md",
|
|
||||||
"include/**/.gitkeep",
|
|
||||||
"include/**/.gitignore",
|
|
||||||
"task/docs/**/*.html",
|
|
||||||
"jsonschemas/**/*.json",
|
|
||||||
"py.typed",
|
|
||||||
]
|
|
||||||
|
|
||||||
[project]
|
[project]
|
||||||
name = "dbt-core"
|
name = "dbt-core"
|
||||||
version = "1.11.0b4"
|
dynamic = ["version"]
|
||||||
description = "With dbt, data analysts and engineers can build analytics the way engineers build applications."
|
description = "With dbt, data analysts and engineers can build analytics the way engineers build applications."
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
requires-python = ">=3.10"
|
requires-python = ">=3.10"
|
||||||
license = "Apache-2.0"
|
license = "Apache-2.0"
|
||||||
license-files = ["License.md"] # License.md copied to core/ by build script even though it lives at the root by convention
|
license-files = { globs = ["LICENSE"] }
|
||||||
keywords = []
|
keywords = []
|
||||||
authors = [
|
authors = [
|
||||||
{ name = "dbt Labs", email = "info@dbtlabs.com" },
|
{ name = "dbt Labs", email = "info@dbtlabs.com" },
|
||||||
@@ -82,7 +56,7 @@ dependencies = [
|
|||||||
# Minor versions for these are expected to be backwards-compatible
|
# Minor versions for these are expected to be backwards-compatible
|
||||||
"dbt-common>=1.27.0,<2.0",
|
"dbt-common>=1.27.0,<2.0",
|
||||||
"dbt-adapters>=1.15.5,<2.0",
|
"dbt-adapters>=1.15.5,<2.0",
|
||||||
"dbt-protos>=1.0.375,<2.0",
|
"dbt-protos>=1.0.397,<2.0",
|
||||||
"pydantic<3",
|
"pydantic<3",
|
||||||
# ----
|
# ----
|
||||||
# Expect compatibility with all new versions of these packages, so lower bounds only.
|
# Expect compatibility with all new versions of these packages, so lower bounds only.
|
||||||
@@ -102,9 +76,10 @@ Changelog = "https://github.com/dbt-labs/dbt-core/blob/main/CHANGELOG.md"
|
|||||||
[project.scripts]
|
[project.scripts]
|
||||||
dbt = "dbt.cli.main:cli"
|
dbt = "dbt.cli.main:cli"
|
||||||
|
|
||||||
|
[tool.hatch.version]
|
||||||
|
path = "dbt/__version__.py"
|
||||||
|
|
||||||
|
|
||||||
 [build-system]
-requires = [
-    "setuptools>=61",
-    "wheel",
-]
-build-backend = "setuptools.build_meta"
+requires = ["hatchling"]
+build-backend = "hatchling.build"
|
|
||||||
|
|||||||
@@ -1,26 +0,0 @@
|
|||||||
#!/usr/bin/env python
|
|
||||||
|
|
||||||
"""Legacy setuptools shim retained for compatibility with existing workflows. Will be removed in a future version."""
|
|
||||||
|
|
||||||
from setuptools import setup
|
|
||||||
|
|
||||||
# the user has a downlevel version of setuptools.
|
|
||||||
# ----
|
|
||||||
# dbt-core uses these packages deeply, throughout the codebase, and there have been breaking changes in past patch releases (even though these are major-version-one).
|
|
||||||
# Pin to the patch or minor version, and bump in each new minor version of dbt-core.
|
|
||||||
# ----
|
|
||||||
# dbt-core uses these packages in standard ways. Pin to the major version, and check compatibility
|
|
||||||
# with major versions in each new minor version of dbt-core.
|
|
||||||
# ----
|
|
||||||
# These packages are major-version-0. Keep upper bounds on upcoming minor versions (which could have breaking changes)
|
|
||||||
# and check compatibility / bump in each new minor version of dbt-core.
|
|
||||||
# ----
|
|
||||||
# These are major-version-0 packages also maintained by dbt-labs.
|
|
||||||
# Accept patches but avoid automatically updating past a set minor version range.
|
|
||||||
# Minor versions for these are expected to be backwards-compatible
|
|
||||||
# ----
|
|
||||||
# Expect compatibility with all new versions of these packages, so lower bounds only.
|
|
||||||
# ----
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
setup()
|
|
||||||
@@ -1,38 +0,0 @@
|
|||||||
git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-adapters
|
|
||||||
git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-tests-adapter
|
|
||||||
git+https://github.com/dbt-labs/dbt-common.git@main
|
|
||||||
git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-postgres
|
|
||||||
black==24.3.0
|
|
||||||
bumpversion
|
|
||||||
ddtrace==2.21.3
|
|
||||||
docutils
|
|
||||||
flake8==4.0.1
|
|
||||||
flaky
|
|
||||||
freezegun>=1.5.1
|
|
||||||
hypothesis
|
|
||||||
ipdb
|
|
||||||
isort==5.13.2
|
|
||||||
mypy==1.4.1
|
|
||||||
pip-tools
|
|
||||||
pre-commit
|
|
||||||
protobuf>=6.0,<7.0
|
|
||||||
pytest>=7.4,<8.0
|
|
||||||
pytest-cov
|
|
||||||
pytest-csv>=3.0,<4.0
|
|
||||||
pytest-dotenv
|
|
||||||
pytest-mock
|
|
||||||
pytest-split
|
|
||||||
pytest-xdist
|
|
||||||
sphinx
|
|
||||||
tox>=3.13
|
|
||||||
types-docutils
|
|
||||||
types-PyYAML
|
|
||||||
types-Jinja2
|
|
||||||
types-jsonschema
|
|
||||||
types-mock
|
|
||||||
types-protobuf>=5.0,<6.0
|
|
||||||
types-python-dateutil
|
|
||||||
types-pytz
|
|
||||||
types-requests
|
|
||||||
types-setuptools
|
|
||||||
mocker
|
|
||||||
@@ -23,7 +23,7 @@ services:
|
|||||||
# Run `make .env` to set $USER_ID and $GROUP_ID
|
# Run `make .env` to set $USER_ID and $GROUP_ID
|
||||||
USER_ID: ${USER_ID:-}
|
USER_ID: ${USER_ID:-}
|
||||||
GROUP_ID: ${GROUP_ID:-}
|
GROUP_ID: ${GROUP_ID:-}
|
||||||
command: "/root/.virtualenvs/dbt/bin/pytest"
|
command: "bash -c 'cd core && hatch run ci:unit-tests'"
|
||||||
environment:
|
environment:
|
||||||
POSTGRES_TEST_HOST: "database"
|
POSTGRES_TEST_HOST: "database"
|
||||||
volumes:
|
volumes:
|
||||||
|
|||||||
@@ -1 +0,0 @@
|
|||||||
-e ./core
|
|
||||||
@@ -17,15 +17,11 @@ rm -rf "$DBT_PATH"/core/build
|
|||||||
|
|
||||||
mkdir -p "$DBT_PATH"/dist
|
mkdir -p "$DBT_PATH"/dist
|
||||||
|
|
||||||
# Copy License.md to core/ for inclusion in distribution (required by Apache 2.0)
|
|
||||||
# The license-files in pyproject.toml references it relative to core/
|
|
||||||
cp "$DBT_PATH"/License.md "$DBT_PATH"/core/License.md
|
|
||||||
|
|
||||||
cd "$DBT_PATH"/core
|
cd "$DBT_PATH"/core
|
||||||
$PYTHON_BIN -m pip install --upgrade build
|
$PYTHON_BIN -m pip install --upgrade hatch
|
||||||
$PYTHON_BIN -m build --outdir "$DBT_PATH/dist"
|
hatch build --clean
|
||||||
|
|
||||||
# Clean up License.md that was copied to core/ for build
|
# Move built distributions to top-level dist/
|
||||||
rm -f "$DBT_PATH/core/License.md"
|
mv "$DBT_PATH"/core/dist/* "$DBT_PATH"/dist/
|
||||||
|
|
||||||
set +x
|
set +x
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
# Set environment variables required for integration tests
|
# Set environment variables required for integration tests
|
||||||
|
# This is used in the release workflow to set the environment variables for the integration tests
|
||||||
echo "DBT_INVOCATION_ENV=github-actions" >> $GITHUB_ENV
|
echo "DBT_INVOCATION_ENV=github-actions" >> $GITHUB_ENV
|
||||||
echo "DBT_TEST_USER_1=dbt_test_user_1" >> $GITHUB_ENV
|
echo "DBT_TEST_USER_1=dbt_test_user_1" >> $GITHUB_ENV
|
||||||
echo "DBT_TEST_USER_2=dbt_test_user_2" >> $GITHUB_ENV
|
echo "DBT_TEST_USER_2=dbt_test_user_2" >> $GITHUB_ENV
|
||||||
|
|||||||
@@ -113,3 +113,10 @@ models:
|
|||||||
to: source('test_source', 'test_table')
|
to: source('test_source', 'test_table')
|
||||||
to_columns: [id]
|
to_columns: [id]
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
stateful_generate_alias_name_macros_sql = """
|
||||||
|
{% macro generate_alias_name(custom_alias_name, node) -%}
|
||||||
|
{{ node.name }}_{{ var("state", "dev") }}
|
||||||
|
{%- endmacro %}
|
||||||
|
"""
|
||||||
|
|||||||
@@ -1,3 +1,6 @@
|
|||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from dbt.artifacts.resources import RefArgs
|
from dbt.artifacts.resources import RefArgs
|
||||||
@@ -17,6 +20,7 @@ from tests.functional.constraints.fixtures import (
|
|||||||
model_foreign_key_model_node_not_found_schema_yml,
|
model_foreign_key_model_node_not_found_schema_yml,
|
||||||
model_foreign_key_model_schema_yml,
|
model_foreign_key_model_schema_yml,
|
||||||
model_foreign_key_source_schema_yml,
|
model_foreign_key_source_schema_yml,
|
||||||
|
stateful_generate_alias_name_macros_sql,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -239,3 +243,57 @@ class TestColumnLevelForeignKeyConstraintRefSyntaxError:
|
|||||||
match="Invalid 'ref' or 'source' syntax on foreign key constraint 'to' on model my_model: invalid.",
|
match="Invalid 'ref' or 'source' syntax on foreign key constraint 'to' on model my_model: invalid.",
|
||||||
):
|
):
|
||||||
run_dbt(["parse"])
|
run_dbt(["parse"])
|
||||||
|
|
||||||
|
|
||||||
|
class BaseForeignKeyDeferState:
|
||||||
|
@pytest.fixture(scope="class")
|
||||||
|
def macros(self):
|
||||||
|
return {
|
||||||
|
"generate_alias_name.sql": stateful_generate_alias_name_macros_sql,
|
||||||
|
}
|
||||||
|
|
||||||
|
def copy_state(self, project_root):
|
||||||
|
state_path = os.path.join(project_root, "state")
|
||||||
|
if not os.path.exists(state_path):
|
||||||
|
os.makedirs(state_path)
|
||||||
|
shutil.copyfile(
|
||||||
|
f"{project_root}/target/manifest.json", f"{project_root}/state/manifest.json"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TestModelLevelForeignKeyConstraintRefToDeferRelation(BaseForeignKeyDeferState):
|
||||||
|
@pytest.fixture(scope="class")
|
||||||
|
def models(self):
|
||||||
|
return {
|
||||||
|
"constraints_schema.yml": model_foreign_key_model_schema_yml,
|
||||||
|
"my_model.sql": "select 1 as id",
|
||||||
|
"my_model_to.sql": "select 1 as id",
|
||||||
|
}
|
||||||
|
|
||||||
|
def test_model_level_fk_to_defer_relation(self, project):
|
||||||
|
results = run_dbt(["run", "--vars", "state: prod"])
|
||||||
|
self.copy_state(project.project_root)
|
||||||
|
|
||||||
|
results = run_dbt(["compile", "--defer", "--state", "state"])
|
||||||
|
|
||||||
|
my_model_node = [r.node for r in results.results if r.node.name == "my_model"][0]
|
||||||
|
assert my_model_node.constraints[0].to.split(".")[-1] == '"my_model_to_prod"'
|
||||||
|
|
||||||
|
|
||||||
|
class TestColumnLevelForeignKeyConstraintToRefDeferRelation(BaseForeignKeyDeferState):
|
||||||
|
@pytest.fixture(scope="class")
|
||||||
|
def models(self):
|
||||||
|
return {
|
||||||
|
"constraints_schema.yml": model_foreign_key_model_column_schema_yml,
|
||||||
|
"my_model.sql": "select 1 as id",
|
||||||
|
"my_model_to.sql": "select 1 as id",
|
||||||
|
}
|
||||||
|
|
||||||
|
def test_column_level_fk_to_defer_relation(self, project):
|
||||||
|
results = run_dbt(["run", "--vars", "state: prod"])
|
||||||
|
self.copy_state(project.project_root)
|
||||||
|
|
||||||
|
results = run_dbt(["compile", "--defer", "--state", "state"])
|
||||||
|
|
||||||
|
my_model_node = [r.node for r in results.results if r.node.name == "my_model"][0]
|
||||||
|
assert my_model_node.columns["id"].constraints[0].to.split(".")[-1] == '"my_model_to_prod"'
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -656,3 +656,151 @@ sources:
|
|||||||
tables:
|
tables:
|
||||||
- name: customers
|
- name: customers
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
# Fixtures for test_removed_test_state.py
|
||||||
|
sample_test_sql = """
|
||||||
|
{% test sample_test(model, column_name) %}
|
||||||
|
select * from {{ model }} where {{ column_name }} is null
|
||||||
|
{% endtest %}
|
||||||
|
"""
|
||||||
|
|
||||||
|
removed_test_model_sql = """
|
||||||
|
select 1 as id
|
||||||
|
"""
|
||||||
|
|
||||||
|
removed_test_schema_yml = """
|
||||||
|
version: 2
|
||||||
|
models:
|
||||||
|
- name: model_a
|
||||||
|
columns:
|
||||||
|
- name: id
|
||||||
|
data_tests:
|
||||||
|
- sample_test
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Fixtures for test_modified_state.py - varchar/numeric size changes
|
||||||
|
varchar_size_contract_schema_yml = """
|
||||||
|
version: 2
|
||||||
|
models:
|
||||||
|
- name: table_model
|
||||||
|
config:
|
||||||
|
contract:
|
||||||
|
enforced: true
|
||||||
|
versions:
|
||||||
|
- v: 1
|
||||||
|
columns:
|
||||||
|
- name: id
|
||||||
|
data_type: integer
|
||||||
|
- name: name
|
||||||
|
data_type: varchar(5)
|
||||||
|
"""
|
||||||
|
|
||||||
|
varchar_size_increased_contract_schema_yml = """
|
||||||
|
version: 2
|
||||||
|
models:
|
||||||
|
- name: table_model
|
||||||
|
config:
|
||||||
|
contract:
|
||||||
|
enforced: true
|
||||||
|
versions:
|
||||||
|
- v: 1
|
||||||
|
columns:
|
||||||
|
- name: id
|
||||||
|
data_type: integer
|
||||||
|
- name: name
|
||||||
|
data_type: varchar(20)
|
||||||
|
"""
|
||||||
|
|
||||||
|
numeric_precision_contract_schema_yml = """
|
||||||
|
version: 2
|
||||||
|
models:
|
||||||
|
- name: table_model
|
||||||
|
config:
|
||||||
|
contract:
|
||||||
|
enforced: true
|
||||||
|
versions:
|
||||||
|
- v: 1
|
||||||
|
columns:
|
||||||
|
- name: id
|
||||||
|
data_type: integer
|
||||||
|
- name: amount
|
||||||
|
data_type: numeric(10,2)
|
||||||
|
"""
|
||||||
|
|
||||||
|
numeric_precision_increased_contract_schema_yml = """
|
||||||
|
version: 2
|
||||||
|
models:
|
||||||
|
- name: table_model
|
||||||
|
config:
|
||||||
|
contract:
|
||||||
|
enforced: true
|
||||||
|
versions:
|
||||||
|
- v: 1
|
||||||
|
columns:
|
||||||
|
- name: id
|
||||||
|
data_type: integer
|
||||||
|
- name: amount
|
||||||
|
data_type: numeric(12,4)
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Case sensitivity test fixtures
|
||||||
|
varchar_size_uppercase_contract_schema_yml = """
|
||||||
|
version: 2
|
||||||
|
models:
|
||||||
|
- name: table_model
|
||||||
|
config:
|
||||||
|
contract:
|
||||||
|
enforced: true
|
||||||
|
versions:
|
||||||
|
- v: 1
|
||||||
|
columns:
|
||||||
|
- name: id
|
||||||
|
data_type: integer
|
||||||
|
- name: name
|
||||||
|
data_type: VARCHAR(5)
|
||||||
|
"""
|
||||||
|
|
||||||
|
varchar_size_lowercase_increased_contract_schema_yml = """
|
||||||
|
version: 2
|
||||||
|
models:
|
||||||
|
- name: table_model
|
||||||
|
config:
|
||||||
|
contract:
|
||||||
|
enforced: true
|
||||||
|
versions:
|
||||||
|
- v: 1
|
||||||
|
columns:
|
||||||
|
- name: id
|
||||||
|
data_type: integer
|
||||||
|
- name: name
|
||||||
|
data_type: varchar(20)
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Unversioned model fixtures
|
||||||
|
varchar_size_unversioned_contract_schema_yml = """
|
||||||
|
version: 2
|
||||||
|
models:
|
||||||
|
- name: table_model
|
||||||
|
config:
|
||||||
|
contract:
|
||||||
|
enforced: true
|
||||||
|
columns:
|
||||||
|
- name: id
|
||||||
|
data_type: integer
|
||||||
|
- name: name
|
||||||
|
data_type: varchar(5)
|
||||||
|
"""
|
||||||
|
|
||||||
|
varchar_size_unversioned_increased_contract_schema_yml = """
|
||||||
|
version: 2
|
||||||
|
models:
|
||||||
|
- name: table_model
|
||||||
|
config:
|
||||||
|
contract:
|
||||||
|
enforced: true
|
||||||
|
columns:
|
||||||
|
- name: id
|
||||||
|
data_type: integer
|
||||||
|
- name: name
|
||||||
|
data_type: varchar(20)
|
||||||
|
"""
|
||||||
|
|||||||
@@ -31,6 +31,8 @@ from tests.functional.defer_state.fixtures import (
|
|||||||
modified_model_constraint_schema_yml,
|
modified_model_constraint_schema_yml,
|
||||||
modified_semantic_model_schema_yml,
|
modified_semantic_model_schema_yml,
|
||||||
no_contract_schema_yml,
|
no_contract_schema_yml,
|
||||||
|
numeric_precision_contract_schema_yml,
|
||||||
|
numeric_precision_increased_contract_schema_yml,
|
||||||
schema_yml,
|
schema_yml,
|
||||||
seed_csv,
|
seed_csv,
|
||||||
semantic_model_schema_yml,
|
semantic_model_schema_yml,
|
||||||
@@ -38,6 +40,12 @@ from tests.functional.defer_state.fixtures import (
|
|||||||
table_model_now_view_sql,
|
table_model_now_view_sql,
|
||||||
table_model_sql,
|
table_model_sql,
|
||||||
unenforced_contract_schema_yml,
|
unenforced_contract_schema_yml,
|
||||||
|
varchar_size_contract_schema_yml,
|
||||||
|
varchar_size_increased_contract_schema_yml,
|
||||||
|
varchar_size_lowercase_increased_contract_schema_yml,
|
||||||
|
varchar_size_unversioned_contract_schema_yml,
|
||||||
|
varchar_size_unversioned_increased_contract_schema_yml,
|
||||||
|
varchar_size_uppercase_contract_schema_yml,
|
||||||
versioned_contract_schema_yml,
|
versioned_contract_schema_yml,
|
||||||
versioned_modified_contract_schema_yml,
|
versioned_modified_contract_schema_yml,
|
||||||
versioned_no_contract_schema_yml,
|
versioned_no_contract_schema_yml,
|
||||||
@@ -1161,3 +1169,95 @@ class TestChangedSemanticModelContents(BaseModifiedState):
|
|||||||
write_file(modified_semantic_model_schema_yml, "models", "schema.yml")
|
write_file(modified_semantic_model_schema_yml, "models", "schema.yml")
|
||||||
results = run_dbt(["list", "-s", "state:modified", "--state", "./state"])
|
results = run_dbt(["list", "-s", "state:modified", "--state", "./state"])
|
||||||
assert len(results) == 1
|
assert len(results) == 1
|
||||||
|
|
||||||
|
|
||||||
|
class TestVersionedContractVarcharSizeChange(BaseModifiedState):
|
||||||
|
"""
|
||||||
|
Test that changing varchar size (e.g., varchar(5) to varchar(20))
|
||||||
|
does not trigger a breaking change error for versioned models.
|
||||||
|
Per dbt docs, size/precision/scale changes should NOT be breaking changes.
|
||||||
|
Reproduces issue: https://github.com/dbt-labs/dbt-core/issues/11186
|
||||||
|
"""
|
||||||
|
|
||||||
|
MODEL_UNIQUE_ID = "model.test.table_model.v1"
|
||||||
|
|
||||||
|
def test_varchar_size_increase_not_breaking(self, project):
|
||||||
|
# Start with varchar(5)
|
||||||
|
write_file(varchar_size_contract_schema_yml, "models", "schema.yml")
|
||||||
|
self.run_and_save_state()
|
||||||
|
|
||||||
|
# Change to varchar(20) - should NOT be a breaking change
|
||||||
|
write_file(varchar_size_increased_contract_schema_yml, "models", "schema.yml")
|
||||||
|
|
||||||
|
# This should PASS without errors or breaking change warnings
|
||||||
|
_, logs = run_dbt_and_capture(
|
||||||
|
["run", "--models", "state:modified.contract", "--state", "./state"]
|
||||||
|
)
|
||||||
|
# Verify no breaking change warning/error
|
||||||
|
assert "breaking change" not in logs.lower()
|
||||||
|
assert "ContractBreakingChangeError" not in logs
|
||||||
|
|
||||||
|
def test_varchar_case_sensitivity_not_breaking(self, project):
|
||||||
|
# Start with VARCHAR(5) - uppercase
|
||||||
|
write_file(varchar_size_uppercase_contract_schema_yml, "models", "schema.yml")
|
||||||
|
self.run_and_save_state()
|
||||||
|
|
||||||
|
# Change to varchar(20) - lowercase with different size
|
||||||
|
# Should NOT be a breaking change (case-insensitive comparison)
|
||||||
|
write_file(varchar_size_lowercase_increased_contract_schema_yml, "models", "schema.yml")
|
||||||
|
|
||||||
|
# This should PASS without errors or breaking change warnings
|
||||||
|
_, logs = run_dbt_and_capture(
|
||||||
|
["run", "--models", "state:modified.contract", "--state", "./state"]
|
||||||
|
)
|
||||||
|
# Verify no breaking change warning/error
|
||||||
|
assert "breaking change" not in logs.lower()
|
||||||
|
assert "ContractBreakingChangeError" not in logs
|
||||||
|
|
||||||
|
def test_numeric_precision_increase_not_breaking(self, project):
|
||||||
|
# Start with numeric(10,2)
|
||||||
|
write_file(numeric_precision_contract_schema_yml, "models", "schema.yml")
|
||||||
|
# Need to modify the table_model.sql to have an amount column instead of name
|
||||||
|
modified_table_model = """
|
||||||
|
select 1 as id, 100.50 as amount
|
||||||
|
"""
|
||||||
|
write_file(modified_table_model, "models", "table_model.sql")
|
||||||
|
self.run_and_save_state()
|
||||||
|
|
||||||
|
# Change to numeric(12,4) - should NOT be a breaking change
|
||||||
|
write_file(numeric_precision_increased_contract_schema_yml, "models", "schema.yml")
|
||||||
|
|
||||||
|
# This should PASS without errors or breaking change warnings
|
||||||
|
_, logs = run_dbt_and_capture(
|
||||||
|
["run", "--models", "state:modified.contract", "--state", "./state"]
|
||||||
|
)
|
||||||
|
# Verify no breaking change warning/error
|
||||||
|
assert "breaking change" not in logs.lower()
|
||||||
|
assert "ContractBreakingChangeError" not in logs
|
||||||
|
|
||||||
|
|
||||||
|
class TestUnversionedContractVarcharSizeChange(BaseModifiedState):
|
||||||
|
"""
|
||||||
|
Test that changing varchar size for UNVERSIONED models behaves correctly.
|
||||||
|
Unversioned models should also NOT issue warnings for size-only changes.
|
||||||
|
This ensures versioned and unversioned models handle size changes consistently.
|
||||||
|
"""
|
||||||
|
|
||||||
|
MODEL_UNIQUE_ID = "model.test.table_model"
|
||||||
|
|
||||||
|
def test_varchar_size_increase_not_breaking_unversioned(self, project):
|
||||||
|
# Start with varchar(5) - no version
|
||||||
|
write_file(varchar_size_unversioned_contract_schema_yml, "models", "schema.yml")
|
||||||
|
self.run_and_save_state()
|
||||||
|
|
||||||
|
# Change to varchar(20) - should NOT be a breaking change
|
||||||
|
write_file(varchar_size_unversioned_increased_contract_schema_yml, "models", "schema.yml")
|
||||||
|
|
||||||
|
# For unversioned models, should also have no breaking change warnings
|
||||||
|
_, logs = run_dbt_and_capture(
|
||||||
|
["run", "--models", "state:modified.contract", "--state", "./state"]
|
||||||
|
)
|
||||||
|
# Verify no breaking change warning (consistent with versioned behavior)
|
||||||
|
assert "breaking change" not in logs.lower()
|
||||||
|
# Model should still be detected as modified and run successfully
|
||||||
|
assert "Completed successfully" in logs
|
||||||
|
|||||||
124
tests/functional/defer_state/test_removed_test_state.py
Normal file
@@ -0,0 +1,124 @@
|
|||||||
|
import pytest
|
||||||
|
|
||||||
|
from dbt.exceptions import CompilationError
|
||||||
|
from dbt.tests.util import run_dbt
|
||||||
|
from tests.functional.defer_state.fixtures import (
|
||||||
|
removed_test_model_sql,
|
||||||
|
removed_test_schema_yml,
|
||||||
|
sample_test_sql,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TestRemovedGenericTest:
|
||||||
|
"""Test that removing a generic test while it's still referenced gives a clear error message."""
|
||||||
|
|
||||||
|
@pytest.fixture(scope="class")
|
||||||
|
def models(self):
|
||||||
|
return {
|
||||||
|
"model_a.sql": removed_test_model_sql,
|
||||||
|
"schema.yml": removed_test_schema_yml,
|
||||||
|
}
|
||||||
|
|
||||||
|
@pytest.fixture(scope="class")
|
||||||
|
def tests(self):
|
||||||
|
return {
|
||||||
|
"generic": {
|
||||||
|
"sample_test.sql": sample_test_sql,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def copy_state(self, project):
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
|
||||||
|
if not os.path.exists(f"{project.project_root}/state"):
|
||||||
|
os.makedirs(f"{project.project_root}/state")
|
||||||
|
shutil.copyfile(
|
||||||
|
f"{project.project_root}/target/manifest.json",
|
||||||
|
f"{project.project_root}/state/manifest.json",
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_removed_generic_test_with_state_modified(self, project):
|
||||||
|
"""
|
||||||
|
Test that state:modified selector handles missing test macros gracefully.
|
||||||
|
|
||||||
|
Issue #10630: When a generic test is removed but still referenced, using
|
||||||
|
--select state:modified would crash with KeyError: None.
|
||||||
|
|
||||||
|
Solution: We check for None macro_uid in the state selector and raise a clear error.
|
||||||
|
"""
|
||||||
|
# Initial run - everything works
|
||||||
|
results = run_dbt(["run"])
|
||||||
|
assert len(results) == 1
|
||||||
|
|
||||||
|
# Save state
|
||||||
|
self.copy_state(project)
|
||||||
|
|
||||||
|
# Remove the generic test file but keep the reference in schema.yml
|
||||||
|
import os
|
||||||
|
|
||||||
|
test_file_path = os.path.join(project.project_root, "tests", "generic", "sample_test.sql")
|
||||||
|
if os.path.exists(test_file_path):
|
||||||
|
os.remove(test_file_path)
|
||||||
|
|
||||||
|
# The key bug fix: dbt run --select state:modified used to crash with KeyError: None
|
||||||
|
# After fix: it should give a clear compilation error during the selection phase
|
||||||
|
with pytest.raises(CompilationError, match="does not exist|macro or test"):
|
||||||
|
run_dbt(["run", "--select", "state:modified", "--state", "state"])
|
||||||
|
|
||||||
|
|
||||||
|
class TestRemovedGenericTestStateModifiedGracefulError:
|
||||||
|
"""Test that state:modified selector handles missing test macros gracefully."""
|
||||||
|
|
||||||
|
@pytest.fixture(scope="class")
|
||||||
|
def models(self):
|
||||||
|
return {
|
||||||
|
"model_a.sql": removed_test_model_sql,
|
||||||
|
"schema.yml": removed_test_schema_yml,
|
||||||
|
}
|
||||||
|
|
||||||
|
@pytest.fixture(scope="class")
|
||||||
|
def tests(self):
|
||||||
|
return {
|
||||||
|
"generic": {
|
||||||
|
"sample_test.sql": sample_test_sql,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def copy_state(self, project):
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
|
||||||
|
if not os.path.exists(f"{project.project_root}/state"):
|
||||||
|
os.makedirs(f"{project.project_root}/state")
|
||||||
|
shutil.copyfile(
|
||||||
|
f"{project.project_root}/target/manifest.json",
|
||||||
|
f"{project.project_root}/state/manifest.json",
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_list_with_state_modified_after_test_removal(self, project):
|
||||||
|
"""
|
||||||
|
Test that state:modified selector handles missing test macros gracefully.
|
||||||
|
This exercises the selector_methods.py code path that was failing with KeyError: None.
|
||||||
|
"""
|
||||||
|
# Initial run - everything works
|
||||||
|
results = run_dbt(["run"])
|
||||||
|
assert len(results) == 1
|
||||||
|
|
||||||
|
# Save state
|
||||||
|
self.copy_state(project)
|
||||||
|
|
||||||
|
# Remove the generic test file but keep the reference in schema.yml
|
||||||
|
import os
|
||||||
|
|
||||||
|
test_file_path = os.path.join(project.project_root, "tests", "generic", "sample_test.sql")
|
||||||
|
if os.path.exists(test_file_path):
|
||||||
|
os.remove(test_file_path)
|
||||||
|
|
||||||
|
# dbt run with state:modified should not crash with KeyError: None
|
||||||
|
# After the fix, it should give a clear CompilationError about the missing test
|
||||||
|
# Previously this crashed with KeyError: None in recursively_check_macros_modified
|
||||||
|
with pytest.raises(
|
||||||
|
CompilationError, match="sample_test|does not exist|macro or generic test"
|
||||||
|
):
|
||||||
|
run_dbt(["run", "--select", "state:modified", "--state", "state"])
|
||||||
49
tests/functional/dependencies/test_add_package_edge_cases.py
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from dbt.tests.util import run_dbt
|
||||||
|
|
||||||
|
|
||||||
|
class TestAddPackageWithWarnUnpinnedInYaml:
|
||||||
|
"""Functional test: Adding packages works even with warn-unpinned in packages.yml.
|
||||||
|
|
||||||
|
This is a regression test for issue #9104. The bug occurred when packages.yml
|
||||||
|
contained warn-unpinned: false and dbt deps --add-package was run. The code
|
||||||
|
would fail with "TypeError: argument of type 'bool' is not iterable".
|
||||||
|
"""
|
||||||
|
|
||||||
|
@pytest.fixture(scope="class")
|
||||||
|
def packages(self):
|
||||||
|
# Start with a git package that has warn-unpinned (matching the bug report)
|
||||||
|
return {
|
||||||
|
"packages": [
|
||||||
|
{
|
||||||
|
"git": "https://github.com/fivetran/dbt_amplitude",
|
||||||
|
"warn-unpinned": False, # This is the config that caused the bug
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def clean_start(self, project):
|
||||||
|
if os.path.exists("dbt_packages"):
|
||||||
|
shutil.rmtree("dbt_packages")
|
||||||
|
if os.path.exists("package-lock.yml"):
|
||||||
|
os.remove("package-lock.yml")
|
||||||
|
|
||||||
|
def test_add_package_with_warn_unpinned_in_yaml(self, clean_start):
|
||||||
|
"""Test that adding a package works when packages.yml contains warn-unpinned: false"""
|
||||||
|
# Before the fix, this would raise: TypeError: argument of type 'bool' is not iterable
|
||||||
|
# This matches the exact scenario from issue #9104
|
||||||
|
run_dbt(["deps", "--add-package", "dbt-labs/dbt_utils@1.0.0"])
|
||||||
|
|
||||||
|
with open("packages.yml") as fp:
|
||||||
|
contents = fp.read()
|
||||||
|
|
||||||
|
# Verify both packages are present
|
||||||
|
assert "dbt_amplitude" in contents or "fivetran/dbt_amplitude" in contents
|
||||||
|
assert "dbt-labs/dbt_utils" in contents or "dbt_utils" in contents
|
||||||
|
# The warn-unpinned should still be there
|
||||||
|
assert "warn-unpinned:" in contents or "warn_unpinned:" in contents
|
||||||
@@ -28,6 +28,11 @@ macros__custom_test_sql = """
|
|||||||
{% endtest %}
|
{% endtest %}
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
models_pre_post_hook_in_config_sql = """
|
||||||
|
{{ config(post_hook="select 1", pre_hook="select 2") }}
|
||||||
|
|
||||||
|
select 1 as id
|
||||||
|
"""
|
||||||
|
|
||||||
bad_name_yaml = """
|
bad_name_yaml = """
|
||||||
version: 2
|
version: 2
|
||||||
@@ -197,6 +202,16 @@ models:
|
|||||||
my_custom_property: "It's over, I have the high ground"
|
my_custom_property: "It's over, I have the high ground"
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
pre_post_hook_in_config_yaml = """
|
||||||
|
models:
|
||||||
|
- name: model_with_hook_configs
|
||||||
|
config:
|
||||||
|
post_hook: "select 1"
|
||||||
|
pre_hook: "select 2"
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
property_moved_to_config_yaml = """
|
property_moved_to_config_yaml = """
|
||||||
models:
|
models:
|
||||||
- name: models_trivial
|
- name: models_trivial
|
||||||
|
|||||||
@@ -40,8 +40,10 @@ from tests.functional.deprecations.fixtures import (
    invalid_deprecation_date_yaml,
    models_custom_key_in_config_non_static_parser_sql,
    models_custom_key_in_config_sql,
    models_pre_post_hook_in_config_sql,
    models_trivial__model_sql,
    multiple_custom_keys_in_config_yaml,
    pre_post_hook_in_config_yaml,
    property_moved_to_config_yaml,
    test_missing_arguments_property_yaml,
    test_with_arguments_yaml,
@@ -316,7 +318,6 @@ class TestDeprecatedInvalidDeprecationDate:
            "models.yml": invalid_deprecation_date_yaml,
        }

    @mock.patch.dict(os.environ, {"DBT_ENV_PRIVATE_RUN_JSONSCHEMA_VALIDATIONS": "True"})
    def test_deprecated_invalid_deprecation_date(self, project):
        event_catcher = EventCatcher(GenericJSONSchemaValidationDeprecation)
        note_catcher = EventCatcher(Note)
@@ -362,7 +363,6 @@ class TestCustomKeyInConfigDeprecation:
        }

    @mock.patch("dbt.jsonschemas.jsonschemas._JSONSCHEMA_SUPPORTED_ADAPTERS", {"postgres"})
    @mock.patch.dict(os.environ, {"DBT_ENV_PRIVATE_RUN_JSONSCHEMA_VALIDATIONS": "True"})
    def test_custom_key_in_config_deprecation(self, project):
        event_catcher = EventCatcher(CustomKeyInConfigDeprecation)
        run_dbt(
@@ -384,7 +384,6 @@ class TestCustomKeyInConfigSQLDeprecation:
        }

    @mock.patch("dbt.jsonschemas.jsonschemas._JSONSCHEMA_SUPPORTED_ADAPTERS", {"postgres"})
    @mock.patch.dict(os.environ, {"DBT_ENV_PRIVATE_RUN_JSONSCHEMA_VALIDATIONS": "True"})
    def test_custom_key_in_config_sql_deprecation(self, project):
        event_catcher = EventCatcher(CustomKeyInConfigDeprecation)
        run_dbt(
@@ -415,7 +414,6 @@ class TestMultipleCustomKeysInConfigDeprecation:
        }

    @mock.patch("dbt.jsonschemas.jsonschemas._JSONSCHEMA_SUPPORTED_ADAPTERS", {"postgres"})
    @mock.patch.dict(os.environ, {"DBT_ENV_PRIVATE_RUN_JSONSCHEMA_VALIDATIONS": "True"})
    def test_multiple_custom_keys_in_config_deprecation(self, project):
        event_catcher = EventCatcher(CustomKeyInConfigDeprecation)
        run_dbt(
@@ -442,7 +440,6 @@ class TestCustomKeyInObjectDeprecation:
        }

    @mock.patch("dbt.jsonschemas.jsonschemas._JSONSCHEMA_SUPPORTED_ADAPTERS", {"postgres"})
    @mock.patch.dict(os.environ, {"DBT_ENV_PRIVATE_RUN_JSONSCHEMA_VALIDATIONS": "True"})
    def test_custom_key_in_object_deprecation(self, project):
        event_catcher = EventCatcher(CustomKeyInObjectDeprecation)
        run_dbt(["parse", "--no-partial-parse"], callbacks=[event_catcher.catch])
@@ -486,7 +483,6 @@ class TestCustomOutputPathInSourceFreshnessDeprecation:


class TestHappyPathProjectHasNoDeprecations:
    @mock.patch.dict(os.environ, {"DBT_ENV_PRIVATE_RUN_JSONSCHEMA_VALIDATIONS": "True"})
    @mock.patch("dbt.jsonschemas.jsonschemas._JSONSCHEMA_SUPPORTED_ADAPTERS", {"postgres"})
    def test_happy_path_project_has_no_deprecations(self, happy_path_project):
        event_cathcer = EventCatcher(DeprecationsSummary)
@@ -498,7 +494,6 @@ class TestHappyPathProjectHasNoDeprecations:


class TestBaseProjectHasNoDeprecations:
    @mock.patch.dict(os.environ, {"DBT_ENV_PRIVATE_RUN_JSONSCHEMA_VALIDATIONS": "True"})
    @mock.patch("dbt.jsonschemas.jsonschemas._JSONSCHEMA_SUPPORTED_ADAPTERS", {"postgres"})
    def test_base_project_has_no_deprecations(self, project):
        event_cathcer = EventCatcher(DeprecationsSummary)
@@ -707,7 +702,6 @@ class TestMissingPlusPrefixDeprecation:
    def project_config_update(self):
        return {"seeds": {"path": {"enabled": True}}}

    @mock.patch.dict(os.environ, {"DBT_ENV_PRIVATE_RUN_JSONSCHEMA_VALIDATIONS": "True"})
    @mock.patch("dbt.jsonschemas.jsonschemas._JSONSCHEMA_SUPPORTED_ADAPTERS", {"postgres"})
    def test_missing_plus_prefix_deprecation(self, project):
        event_catcher = EventCatcher(MissingPlusPrefixDeprecation)
@@ -721,7 +715,6 @@ class TestMissingPlusPrefixDeprecationSubPath:
    def project_config_update(self):
        return {"seeds": {"path": {"+enabled": True, "sub_path": {"enabled": True}}}}

    @mock.patch.dict(os.environ, {"DBT_ENV_PRIVATE_RUN_JSONSCHEMA_VALIDATIONS": "True"})
    @mock.patch("dbt.jsonschemas.jsonschemas._JSONSCHEMA_SUPPORTED_ADAPTERS", {"postgres"})
    def test_missing_plus_prefix_deprecation_sub_path(self, project):
        event_catcher = EventCatcher(MissingPlusPrefixDeprecation)
@@ -735,7 +728,6 @@ class TestMissingPlusPrefixDeprecationCustomConfig:
    def project_config_update(self):
        return {"seeds": {"path": {"custom_config": True, "sub_path": {"+enabled": True}}}}

    @mock.patch.dict(os.environ, {"DBT_ENV_PRIVATE_RUN_JSONSCHEMA_VALIDATIONS": "True"})
    @mock.patch("dbt.jsonschemas.jsonschemas._JSONSCHEMA_SUPPORTED_ADAPTERS", {"postgres"})
    def test_missing_plus_prefix_deprecation_sub_path(self, project):
        event_catcher = EventCatcher(MissingPlusPrefixDeprecation)
@@ -749,7 +741,6 @@ class TestCustomConfigInDbtProjectYmlNoDeprecation:
    def project_config_update(self):
        return {"seeds": {"path": {"+custom_config": True}}}

    @mock.patch.dict(os.environ, {"DBT_ENV_PRIVATE_RUN_JSONSCHEMA_VALIDATIONS": "True"})
    @mock.patch("dbt.jsonschemas.jsonschemas._JSONSCHEMA_SUPPORTED_ADAPTERS", {"postgres"})
    def test_missing_plus_prefix_deprecation_sub_path(self, project):
        note_catcher = EventCatcher(Note)
@@ -782,10 +773,6 @@ class TestJsonSchemaValidationGating:
        dbt_private_run_jsonschema_validations: bool,
        expected_events: int,
    ) -> None:
        mocker.patch.dict(
            os.environ,
            {"DBT_ENV_PRIVATE_RUN_JSONSCHEMA_VALIDATIONS": dbt_private_run_jsonschema_validations},
        )

        if postgres_is_valid:
            supported_adapters_with_postgres = {
@@ -911,7 +898,6 @@ class TestPropertyMovedToConfigDeprecation:
            "models.yml": property_moved_to_config_yaml,
        }

    @mock.patch.dict(os.environ, {"DBT_ENV_PRIVATE_RUN_JSONSCHEMA_VALIDATIONS": "True"})
    @mock.patch("dbt.jsonschemas.jsonschemas._JSONSCHEMA_SUPPORTED_ADAPTERS", {"postgres"})
    def test_property_moved_to_config_deprecation(self, project):
        event_catcher = EventCatcher(PropertyMovedToConfigDeprecation)
@@ -920,3 +906,21 @@ class TestPropertyMovedToConfigDeprecation:
            callbacks=[event_catcher.catch],
        )
        assert len(event_catcher.caught_events) == 7


class TestPrePostHookNoFalsePositiveDeprecation:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "model_hook_configs.sql": models_pre_post_hook_in_config_sql,
            "schema.yml": pre_post_hook_in_config_yaml,
        }

    @mock.patch("dbt.jsonschemas.jsonschemas._JSONSCHEMA_SUPPORTED_ADAPTERS", {"postgres"})
    def test_pre_post_hook_no_false_positive_deprecation(self, project):
        event_catcher = EventCatcher(CustomKeyInConfigDeprecation)
        run_dbt(
            ["parse", "--no-partial-parse", "--show-all-deprecations"],
            callbacks=[event_catcher.catch],
        )
        assert len(event_catcher.caught_events) == 0
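Spelled out, the project that the new TestPrePostHookNoFalsePositiveDeprecation test feeds to dbt parse looks roughly like the sketch below, assembled from the two fixtures added earlier (models_pre_post_hook_in_config_sql and pre_post_hook_in_config_yaml). It is a convenience reconstruction for reading the diff, not additional test code.

# model_hook_configs.sql (from models_pre_post_hook_in_config_sql)
model_hook_configs_sql = """
{{ config(post_hook="select 1", pre_hook="select 2") }}

select 1 as id
"""

# schema.yml (from pre_post_hook_in_config_yaml): pre_hook/post_hook are real
# config keys here, so parsing this project must not emit
# CustomKeyInConfigDeprecation.
schema_yml = """
models:
  - name: model_with_hook_configs
    config:
      post_hook: "select 1"
      pre_hook: "select 2"
"""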
222
tests/functional/deps/test_deps_with_vars.py
Normal file
@@ -0,0 +1,222 @@
"""Test that dbt deps works when vars are used in dbt_project.yml without defaults.

The key behavior being tested:
- dbt deps uses lenient mode (require_vars=False) and succeeds even with missing vars
- dbt run/compile/build/debug use strict mode (require_vars=True) and show the right error messages

Expected behavior from reviewer's scenario:
1. dbt deps succeeds (doesn't need vars)
2. dbt run fails with error "Required var 'X' not found"
3. dbt run --vars succeeds when vars provided
"""

import pytest

from dbt.tests.util import run_dbt, update_config_file
from dbt_common.exceptions import CompilationError

# Simple model for testing
model_sql = """
select 1 as id
"""


# Base class with common fixtures
class VarTestingBase:
    """Base class for var testing with common fixtures"""

    @pytest.fixture(scope="class")
    def models(self):
        return {"test_model.sql": model_sql}

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "models": {"test_project": {"+materialized": "{{ var('materialized_var', 'view') }}"}}
        }


# Test 1: Happy path - deps with defaults
class TestDepsSucceedsWithVarDefaults(VarTestingBase):
    """Test that dbt deps succeeds when vars have default values"""

    @pytest.fixture(scope="class")
    def project_config_update(self):
        # config: +dataset: "{{ var('my_dataset', 'default') }}"
        return {"models": {"test_project": {"+dataset": "dqm_{{ var('my_dataset', 'default') }}"}}}

    def test_deps_succeeds(self, project):
        # run: dbt deps
        # assert: succeeds
        results = run_dbt(["deps"])
        assert results is None or results == []


# Test 2: Happy path - run with defaults
class TestRunSucceedsWithVarDefaults(VarTestingBase):
    """Test that dbt run succeeds when vars have default values"""

    def test_run_succeeds(self, project):
        # run: dbt run
        # assert: succeeds
        results = run_dbt(["run"])
        assert len(results) == 1


# Test 3: Happy path - run with explicit vars
class TestRunSucceedsWithExplicitVars(VarTestingBase):
    """Test that dbt run succeeds when vars provided via --vars"""

    def test_run_succeeds_with_vars(self, project):
        # run: dbt run --vars '{"materialized_var": "table"}'
        # assert: succeeds
        results = run_dbt(["run", "--vars", '{"materialized_var": "table"}'])
        assert len(results) == 1


# Test 4: Run fails with the right error message
class TestRunFailsWithMissingVar(VarTestingBase):
    """Test dbt run fails with right error"""

    def test_run_fails_with_error(self, project):
        # IN TEST: dynamically remove default
        update_config_file(
            {"models": {"test_project": {"+materialized": "{{ var('materialized_var') }}"}}},
            project.project_root,
            "dbt_project.yml",
        )

        # run: dbt run
        # assert: fails with "Required var 'X' not found"
        try:
            run_dbt(["run"], expect_pass=False)
            assert False, "Expected run to fail with missing required var"
        except CompilationError as e:
            error_msg = str(e)
            # ✅ Verify error message
            assert "materialized_var" in error_msg, "Error should mention var name"
            assert (
                "Required var" in error_msg or "not found" in error_msg
            ), "Error should say 'Required var' or 'not found'"


# Test 5: compile also fails with the correct error
class TestCompileFailsWithMissingVar(VarTestingBase):
    """Test dbt compile fails with error for missing vars"""

    @pytest.fixture(scope="class")
    def project_config_update(self):
        # config: start with simple hardcoded value (no var)
        return {"models": {"test_project": {"+materialized": "view"}}}

    def test_compile_fails_with_error(self, project):
        # IN TEST: dynamically add var without default
        update_config_file(
            {"models": {"test_project": {"+materialized": "{{ var('compile_var_no_default') }}"}}},
            project.project_root,
            "dbt_project.yml",
        )

        # run: dbt compile
        # assert: fails with "Required var 'X' not found"
        try:
            run_dbt(["compile"], expect_pass=False)
            assert False, "Expected compile to fail with missing var"
        except CompilationError as e:
            error_msg = str(e)
            assert "compile_var_no_default" in error_msg
            assert "Required var" in error_msg or "not found" in error_msg


# Test 6: deps succeeds even when var missing
class TestDepsSucceedsEvenWhenVarMissing(VarTestingBase):
    """Test dbt deps succeeds even when var has no default"""

    def test_deps_still_succeeds(self, project):
        # run: dbt deps (succeeds)
        results = run_dbt(["deps"])
        assert results is None or results == []

        # IN TEST: modify config to remove var default
        update_config_file(
            {"models": {"test_project": {"+materialized": "{{ var('materialized_var') }}"}}},
            project.project_root,
            "dbt_project.yml",
        )

        # run: dbt deps again (still succeeds - lenient mode)
        results = run_dbt(["deps"])
        assert results is None or results == []

        # run: dbt run (fails - strict mode)
        try:
            run_dbt(["run"], expect_pass=False)
            assert False, "Expected run to fail with missing var"
        except CompilationError as e:
            error_msg = str(e)
            assert "materialized_var" in error_msg
            assert "Required var" in error_msg or "not found" in error_msg


# Test 7: build also fails
class TestBuildFailsWithMissingVar(VarTestingBase):
    """Test dbt build fails with error for missing vars"""

    @pytest.fixture(scope="class")
    def project_config_update(self):
        # config: start with simple hardcoded value (no var)
        return {"models": {"test_project": {"+materialized": "view"}}}

    def test_build_fails_with_error(self, project):
        # IN TEST: dynamically add var without default
        update_config_file(
            {"models": {"test_project": {"+materialized": "{{ var('build_var_no_default') }}"}}},
            project.project_root,
            "dbt_project.yml",
        )

        # run: dbt build
        # assert: fails with "Required var 'X' not found"
        try:
            run_dbt(["build"], expect_pass=False)
            assert False, "Expected build to fail with missing var"
        except CompilationError as e:
            error_msg = str(e)
            assert "build_var_no_default" in error_msg
            assert "Required var" in error_msg or "not found" in error_msg


# Test 8: debug with defaults
class TestDebugSucceedsWithVarDefaults(VarTestingBase):
    """Test dbt debug succeeds when vars have defaults (no regression)"""

    def test_debug_succeeds(self, project):
        # run: dbt debug
        # assert: succeeds (no regression)
        run_dbt(["debug"])


# Test 9: debug fails like run/compile (strict mode)
class TestDebugFailsWithMissingVar(VarTestingBase):
    """Test dbt debug fails with error (strict mode like run/compile)"""

    def test_debug_fails_with_error(self, project):
        # First verify debug works with default
        run_dbt(["debug"])

        # IN TEST: dynamically remove default
        update_config_file(
            {"models": {"test_project": {"+materialized": "{{ var('materialized_var') }}"}}},
            project.project_root,
            "dbt_project.yml",
        )

        # run: dbt debug
        # assert: fails with "Required var 'X' not found"
        try:
            run_dbt(["debug"], expect_pass=False)
            assert False, "Expected debug to fail with missing var"
        except CompilationError as e:
            error_msg = str(e)
            assert "materialized_var" in error_msg
            assert "Required var" in error_msg or "not found" in error_msg
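The lenient/strict split that the module docstring above describes can be pictured with a small, self-contained sketch. This is illustrative only: the require_vars flag name is taken from the docstring, while resolve_var, RequiredVarNotFound and the rest are hypothetical and do not mirror dbt's actual var implementation.

# Minimal sketch of lenient vs. strict var resolution, assuming a
# require_vars flag like the one described in the test docstring above.
from typing import Any, Dict, Optional

_MISSING = object()


class RequiredVarNotFound(Exception):
    """Raised in strict mode when a var has no value and no default."""


def resolve_var(
    name: str,
    cli_vars: Dict[str, Any],
    default: Any = _MISSING,
    require_vars: bool = True,
) -> Optional[Any]:
    """Look up a var; in lenient mode a missing var resolves to None."""
    if name in cli_vars:
        return cli_vars[name]
    if default is not _MISSING:
        return default
    if require_vars:  # strict mode: run / compile / build / debug
        raise RequiredVarNotFound(f"Required var '{name}' not found")
    return None  # lenient mode: deps only needs the config to parse


if __name__ == "__main__":
    # deps-style (lenient): a missing var does not abort dependency resolution
    print(resolve_var("materialized_var", {}, require_vars=False))  # prints: None
    # run-style (strict): the same lookup raises the error the tests assert on
    try:
        resolve_var("materialized_var", {}, require_vars=True)
    except RequiredVarNotFound as exc:
        print(exc)  # prints: Required var 'materialized_var' not found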
Some files were not shown because too many files have changed in this diff.