Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-17 19:31:34 +00:00

Compare commits: er/add-tes...fix_spaces (52 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 191474055d |  |
|  | 2edd5b3335 |  |
|  | 668fe78e2d |  |
|  | fe28d9e115 |  |
|  | 5cb127999c |  |
|  | 86b349f812 |  |
|  | a70024f745 |  |
|  | 8b5884b527 |  |
|  | 4c1d0e92cd |  |
|  | 6e7e55212b |  |
|  | 11dbe679b9 |  |
|  | c63ae89efb |  |
|  | ee74a60082 |  |
|  | 607646b627 |  |
|  | 7e164e3ab7 |  |
|  | 7e72cace2b |  |
|  | c53d67d3b5 |  |
|  | cb56f4fdc1 |  |
|  | f15e128d6c |  |
|  | 99d033ffec |  |
|  | 6fee361183 |  |
|  | 95581cc661 |  |
|  | 3c4456ddbf |  |
|  | b44c2e498d |  |
|  | c86cec3256 |  |
|  | a1f005789d |  |
|  | d03292e8b9 |  |
|  | ebacedd89d |  |
|  | fb41ce93d6 |  |
|  | 1e4e15c023 |  |
|  | cf08b8411a |  |
|  | e81f7fdbd5 |  |
|  | 96f54264b4 |  |
|  | b945d177d3 |  |
|  | ebc22fa26c |  |
|  | a994ace2db |  |
|  | f2a5ad0504 |  |
|  | fe33dcc3d6 |  |
|  | c95b1ea5e6 |  |
|  | 0d87d314ac |  |
|  | 71f3519611 |  |
|  | 02d7727365 |  |
|  | f683e36468 |  |
|  | cfaacc6e49 |  |
|  | a029661e23 |  |
|  | 80b2a47d60 |  |
|  | 9af5ec6069 |  |
|  | e46eae1f0e |  |
|  | c07186855f |  |
|  | 3e7778c380 |  |
|  | 68970d09fa |  |
|  | 8c8c6284fb |  |
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.8.0b1
+current_version = 1.8.0b2
 parse = (?P<major>[\d]+) # major version number
     \.(?P<minor>[\d]+) # minor version number
     \.(?P<patch>[\d]+) # patch version number

@@ -35,5 +35,3 @@ first_value = 1
[bumpversion:file:core/setup.py]

[bumpversion:file:core/dbt/version.py]

[bumpversion:file:docker/Dockerfile]
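As an aside, the named groups in the `parse` pattern above can be exercised directly with Python's `re` module. This is a minimal sketch covering only the three groups visible in the hunk (the real `[bumpversion]` pattern continues with prerelease parts not shown here; the config's trailing comments are moved outside the pattern for plain `re` compatibility):

```python
import re

# Only the three groups visible in the hunk above.
PARSE = re.compile(
    r"(?P<major>[\d]+)"    # major version number
    r"\.(?P<minor>[\d]+)"  # minor version number
    r"\.(?P<patch>[\d]+)"  # patch version number
)

match = PARSE.match("1.8.0")
assert match is not None
print(match.groupdict())  # -> {'major': '1', 'minor': '8', 'patch': '0'}
```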
53  .changes/1.8.0-b2.md  Normal file
@@ -0,0 +1,53 @@
+## dbt-core 1.8.0-b2 - April 03, 2024
+
+### Features
+
+- Global config for --target and --profile CLI flags and DBT_TARGET and DBT_PROFILE environment variables. ([#7798](https://github.com/dbt-labs/dbt-core/issues/7798))
+- Allow excluding resource types for build, list, and clone commands, and provide env vars ([#9237](https://github.com/dbt-labs/dbt-core/issues/9237))
+- SourceDefinition.meta represents source-level and table-level meta properties, instead of only table-level ([#9766](https://github.com/dbt-labs/dbt-core/issues/9766))
+- Allow metrics in semantic layer filters. ([#9804](https://github.com/dbt-labs/dbt-core/issues/9804))
+
+### Fixes
+
+- fix lock-file bad indentation ([#9319](https://github.com/dbt-labs/dbt-core/issues/9319))
+- Tighten exception handling to avoid worker thread hangs. ([#9583](https://github.com/dbt-labs/dbt-core/issues/9583))
+- Do not add duplicate input_measures ([#9360](https://github.com/dbt-labs/dbt-core/issues/9360))
+- Throw a ParsingError if a primary key constraint is defined on multiple columns or at both the column and model level. ([#9581](https://github.com/dbt-labs/dbt-core/issues/9581))
+- Bug fix: don't parse Jinja in filters for input metrics or measures. ([#9582](https://github.com/dbt-labs/dbt-core/issues/9582))
+- Fix traceback parsing for exceptions raised due to csv fixtures moved into or out of fixture/subfolders. ([#9570](https://github.com/dbt-labs/dbt-core/issues/9570))
+- Fix partial parsing `KeyError` on deleted schema files ([#8860](https://github.com/dbt-labs/dbt-core/issues/8860))
+- Support saved queries in `dbt list` ([#9532](https://github.com/dbt-labs/dbt-core/issues/9532))
+- include sources in catalog.json when over 100 relations selected for catalog generation ([#9755](https://github.com/dbt-labs/dbt-core/issues/9755))
+- Support overriding macros in packages in unit testing ([#9624](https://github.com/dbt-labs/dbt-core/issues/9624))
+- Handle exceptions for failing on-run-* hooks in source freshness ([#9511](https://github.com/dbt-labs/dbt-core/issues/9511))
+- Validation of unit test parsing for incremental models ([#9593](https://github.com/dbt-labs/dbt-core/issues/9593))
+- Fix use of retry command on command using defer ([#9770](https://github.com/dbt-labs/dbt-core/issues/9770))
+- Make `args` variable to be un-modified by `dbt.invoke(args)` ([#8938](https://github.com/dbt-labs/dbt-core/issues/8938), [#9787](https://github.com/dbt-labs/dbt-core/issues/9787))
+- Unit test path outputs ([#9608](https://github.com/dbt-labs/dbt-core/issues/9608))
+- Fix assorted source freshness edgecases so check is run or actionable information is given ([#9078](https://github.com/dbt-labs/dbt-core/issues/9078))
+- "Fix Docker release process to account for both historical and current versions of `dbt-postgres` ([#9827](https://github.com/dbt-labs/dbt-core/issues/9827))
+
+### Docs
+
+- Add analytics for dbt.com ([dbt-docs/#430](https://github.com/dbt-labs/dbt-docs/issues/430))
+
+### Under the Hood
+
+- Remove unused key `wildcard` from MethodName enum ([#9641](https://github.com/dbt-labs/dbt-core/issues/9641))
+- Improve dbt CLI speed ([#4627](https://github.com/dbt-labs/dbt-core/issues/4627))
+- Include node_info in various Result events ([#9619](https://github.com/dbt-labs/dbt-core/issues/9619))
+
+### Dependencies
+
+- Bump actions/upload-artifact from 3 to 4 ([#9470](https://github.com/dbt-labs/dbt-core/pull/9470))
+- Restrict protobuf to 4.* versions ([#9566](https://github.com/dbt-labs/dbt-core/pull/9566))
+- Bump codecov/codecov-action from 3 to 4 ([#9659](https://github.com/dbt-labs/dbt-core/pull/9659))
+
+### Contributors
+- [@asweet](https://github.com/asweet) ([#9641](https://github.com/dbt-labs/dbt-core/issues/9641))
+- [@b-per](https://github.com/b-per) ([#430](https://github.com/dbt-labs/dbt-core/issues/430))
+- [@barton996](https://github.com/barton996) ([#7798](https://github.com/dbt-labs/dbt-core/issues/7798))
+- [@courtneyholcomb](https://github.com/courtneyholcomb) ([#9804](https://github.com/dbt-labs/dbt-core/issues/9804), [#9582](https://github.com/dbt-labs/dbt-core/issues/9582))
+- [@dwreeves](https://github.com/dwreeves) ([#4627](https://github.com/dbt-labs/dbt-core/issues/4627))
+- [@jx2lee](https://github.com/jx2lee) ([#9319](https://github.com/dbt-labs/dbt-core/issues/9319))
+- [@slothkong](https://github.com/slothkong) ([#9570](https://github.com/dbt-labs/dbt-core/issues/9570))
@@ -3,4 +3,4 @@ body: "Bump actions/checkout from 3 to 4"
|
||||
time: 2023-10-05T15:18:48.00000Z
|
||||
custom:
|
||||
Author: dependabot[bot]
|
||||
PR: 8781
|
||||
Issue: 8781
|
||||
|
||||
@@ -3,4 +3,4 @@ body: Begin using DSI 0.4.x
|
||||
time: 2023-10-31T13:19:54.750009-07:00
|
||||
custom:
|
||||
Author: QMalcolm peterallenwebb
|
||||
PR: "8892"
|
||||
Issue: "8892"
|
||||
|
||||
@@ -3,4 +3,4 @@ body: Update typing-extensions version to >=4.4
|
||||
time: 2023-11-06T13:00:51.062386-08:00
|
||||
custom:
|
||||
Author: tlento
|
||||
PR: "9012"
|
||||
Issue: "9012"
|
||||
|
||||
@@ -3,4 +3,4 @@ body: "Bump ddtrace from 2.1.7 to 2.3.0"
|
||||
time: 2023-11-22T00:18:40.00000Z
|
||||
custom:
|
||||
Author: dependabot[bot]
|
||||
PR: 9132
|
||||
Issue: 9132
|
||||
|
||||
@@ -3,4 +3,4 @@ body: "Bump freezegun from 0.3.12 to 1.3.0"
|
||||
time: 2023-12-04T00:09:45.00000Z
|
||||
custom:
|
||||
Author: dependabot[bot]
|
||||
PR: 9197
|
||||
Issue: 9197
|
||||
|
||||
@@ -3,4 +3,4 @@ body: "Bump actions/setup-python from 4 to 5"
|
||||
time: 2023-12-11T00:56:51.00000Z
|
||||
custom:
|
||||
Author: dependabot[bot]
|
||||
PR: 9267
|
||||
Issue: 9267
|
||||
|
||||
@@ -3,4 +3,4 @@ body: "Bump actions/download-artifact from 3 to 4"
|
||||
time: 2024-01-15T01:20:30.00000Z
|
||||
custom:
|
||||
Author: dependabot[bot]
|
||||
PR: 9374
|
||||
Issue: 9374
|
||||
|
||||
@@ -3,4 +3,4 @@ body: remove dbt/adapters and add dependency on dbt-adapters
|
||||
time: 2024-01-23T10:58:43.286952-08:00
|
||||
custom:
|
||||
Author: colin-rogers-dbt
|
||||
PR: "9430"
|
||||
Issue: "9430"
|
||||
|
||||
@@ -3,4 +3,4 @@ body: "Bump actions/upload-artifact from 3 to 4"
|
||||
time: 2024-01-29T00:57:34.00000Z
|
||||
custom:
|
||||
Author: dependabot[bot]
|
||||
PR: 9470
|
||||
Issue: 9470
|
||||
@@ -3,4 +3,4 @@ body: "Bump actions/cache from 3 to 4"
|
||||
time: 2024-01-29T00:57:43.00000Z
|
||||
custom:
|
||||
Author: dependabot[bot]
|
||||
PR: 9471
|
||||
Issue: 9471
|
||||
|
||||
@@ -3,4 +3,4 @@ body: "Bump peter-evans/create-pull-request from 5 to 6"
|
||||
time: 2024-02-12T01:13:24.00000Z
|
||||
custom:
|
||||
Author: dependabot[bot]
|
||||
PR: 9552
|
||||
Issue: 9552
|
||||
|
||||
@@ -3,4 +3,4 @@ body: Restrict protobuf to 4.* versions
|
||||
time: 2024-02-22T10:29:47.595435-08:00
|
||||
custom:
|
||||
Author: QMalcolm
|
||||
PR: "9566"
|
||||
Issue: "9566"
|
||||
@@ -3,4 +3,4 @@ body: "Bump codecov/codecov-action from 3 to 4"
|
||||
time: 2024-02-26T00:44:12.00000Z
|
||||
custom:
|
||||
Author: dependabot[bot]
|
||||
PR: 9659
|
||||
Issue: 9659
|
||||
@@ -3,4 +3,4 @@ body: Cap dbt-semantic-interfaces version range to <0.6
|
||||
time: 2024-02-26T12:35:02.643779-08:00
|
||||
custom:
|
||||
Author: tlento
|
||||
PR: "9671"
|
||||
Issue: "9671"
|
||||
|
||||
@@ -3,4 +3,4 @@ body: bump dbt-common to accept major version 1
|
||||
time: 2024-02-27T15:11:15.583604-05:00
|
||||
custom:
|
||||
Author: michelleark
|
||||
PR: "9690"
|
||||
Issue: "9690"
|
||||
|
||||
6  .changes/1.8.0/Features-20240322-103124.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Features
+body: Allow metrics in semantic layer filters.
+time: 2024-03-22T10:31:24.76978-07:00
+custom:
+  Author: courtneyholcomb
+  Issue: "9804"
6  .changes/1.8.0/Fixes-20240323-122018.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Make `args` variable to be un-modified by `dbt.invoke(args)`
+time: 2024-03-23T12:20:18.170948-06:00
+custom:
+  Author: dbeatty10
+  Issue: 8938 9787
6  .changes/1.8.0/Fixes-20240326-003411.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Unit test path outputs
+time: 2024-03-26T00:34:11.162594Z
+custom:
+  Author: aranke
+  Issue: "9608"
7  .changes/1.8.0/Fixes-20240326-162100.yaml  Normal file

@@ -0,0 +1,7 @@
+kind: Fixes
+body: Fix assorted source freshness edgecases so check is run or actionable information
+  is given
+time: 2024-03-26T16:21:00.008936-07:00
+custom:
+  Author: QMalcolm
+  Issue: "9078"
7  .changes/1.8.0/Fixes-20240327-150013.yaml  Normal file

@@ -0,0 +1,7 @@
+kind: Fixes
+body: '"Fix Docker release process to account for both historical and current versions
+  of `dbt-postgres`'
+time: 2024-03-27T15:00:13.388268-04:00
+custom:
+  Author: mikealfare
+  Issue: "9827"
@@ -3,4 +3,4 @@ body: Update Jinja2 to >= 3.1.3 to address CVE-2024-22195
|
||||
time: 2024-02-22T15:24:45.158305-08:00
|
||||
custom:
|
||||
Author: QMalcolm
|
||||
PR: CVE-2024-22195
|
||||
Issue: 9638
|
||||
|
||||
6  .changes/1.8.0/Under the Hood-20240223-092330.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Remove unused key `wildcard` from MethodName enum
+time: 2024-02-23T09:23:30.029245-05:00
+custom:
+  Author: asweet
+  Issue: "9641"
6  .changes/1.8.0/Under the Hood-20240325-172059.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Include node_info in various Result events
+time: 2024-03-25T17:20:59.445718-04:00
+custom:
+  Author: gshank
+  Issue: "9619"
6  .changes/unreleased/Dependencies-20240117-100818.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Dependencies
+body: Relax pathspec upper bound version restriction
+time: 2024-01-17T10:08:18.009949641+01:00
+custom:
+  Author: rzjfr
+  Issue: "9373"
6  .changes/unreleased/Dependencies-20240227-142138.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Dependencies
+body: Bump python from 3.10.7-slim-bullseye to 3.11.2-slim-bullseye in /docker
+time: 2024-02-27T14:21:38.394757-05:00
+custom:
+  Author: michelleark
+  PR: "9687"
6  .changes/unreleased/Dependencies-20240331-103917.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Dependencies
+body: Remove duplicate dependency of protobuf in dev-requirements
+time: 2024-03-31T10:39:17.432017-07:00
+custom:
+  Author: niteshy
+  Issue: "9830"
6  .changes/unreleased/Dependencies-20240410-183321.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: "Dependencies"
+body: "Bump black from 23.3.0 to >=24.3.0,<25.0"
+time: 2024-04-10T18:33:21.00000Z
+custom:
+  Author: dependabot[bot]
+  PR: 8074
6  .changes/unreleased/Features-20240307-153622.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Features
+body: Support scrubbing secret vars
+time: 2024-03-07T15:36:22.754627+01:00
+custom:
+  Author: nielspardon
+  Issue: "7247"
6  .changes/unreleased/Features-20240323-201230.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Features
+body: Add wildcard support to the group selector method
+time: 2024-03-23T20:12:30.715975-04:00
+custom:
+  Author: heysweet
+  Issue: "9811"
6  .changes/unreleased/Features-20240404-170728.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Features
+body: 'source freshness precomputes metadata-based freshness in batch, if possible '
+time: 2024-04-04T17:07:28.717868-07:00
+custom:
+  Author: michelleark
+  Issue: "8705"
6  .changes/unreleased/Features-20240405-175733.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Features
+body: Better error message when trying to select a disabled model
+time: 2024-04-05T17:57:33.047963+02:00
+custom:
+  Author: SamuelBFavarin
+  Issue: "9747"
6  .changes/unreleased/Features-20240408-094132.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Features
+body: Support SQL in unit testing fixtures
+time: 2024-04-08T09:41:32.15936-04:00
+custom:
+  Author: gshank
+  Issue: "9405"
6  .changes/unreleased/Fixes-20240108-232035.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Fixes
+body: fix configuration of turning test warnings into failures with WARN_ERROR_OPTIONS
+time: 2024-01-08T23:20:35.339102+09:00
+custom:
+  Author: jx2lee
+  Issue: "7761"
6  .changes/unreleased/Fixes-20240206-152435.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix conflict with newer versions of Snowplow tracker
+time: 2024-02-06T15:24:35.778891-06:00
+custom:
+  Author: edgarrmondragon akurdyukov
+  Issue: "8719"
6  .changes/unreleased/Fixes-20240323-124558.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Only create the packages-install-path / dbt_packages folder during dbt deps
+time: 2024-03-23T12:45:58.159017-06:00
+custom:
+  Author: dbeatty10
+  Issue: 6985 9584
6  .changes/unreleased/Fixes-20240402-135556.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Exclude password-like fields for considering reparse
+time: 2024-04-02T13:55:56.169953-07:00
+custom:
+  Author: ChenyuLInx
+  Issue: "9795"
6  .changes/unreleased/Fixes-20240408-130646.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fixed query comments test
+time: 2024-04-08T13:06:46.648144+02:00
+custom:
+  Author: damian3031
+  Issue: "9860"
6  .changes/unreleased/Fixes-20240409-233347.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Begin warning people about spaces in model names
+time: 2024-04-09T23:33:47.850166-07:00
+custom:
+  Author: QMalcolm
+  Issue: "9397"
6  .changes/unreleased/Fixes-20240412-095718.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Disambiguiate FreshnessConfigProblem error message
+time: 2024-04-12T09:57:18.417882-07:00
+custom:
+  Author: michelleark
+  Issue: "9891"
6  .changes/unreleased/Security-20240417-141316.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Security
+body: Bump sqlparse to >=0.5.0, <0.6.0 to address GHSA-2m57-hf25-phgg
+time: 2024-04-17T14:13:16.896353-05:00
+custom:
+  Author: emmoop
+  Issue: "9951"
6  .changes/unreleased/Under the Hood-20240412-132000.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Remove non dbt.artifacts dbt.* imports from dbt/artifacts
+time: 2024-04-12T13:20:00.017737-07:00
+custom:
+  Author: michelleark
+  Issue: "9926"
6  .changes/unreleased/Under the Hood-20240412-134502.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Migrate to using `error_tag` provided by `dbt-common`
+time: 2024-04-12T13:45:02.879023-07:00
+custom:
+  Author: QMalcolm
+  Issue: "9914"
6  .changes/unreleased/Under the Hood-20240416-150030.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Add a test for semantic manifest and move test fixtures needed for it
+time: 2024-04-16T15:00:30.614286-07:00
+custom:
+  Author: ChenyuLInx
+  Issue: "9665"
@@ -31,43 +31,7 @@ kinds:
       - {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}})
   - label: Under the Hood
   - label: Dependencies
-    changeFormat: |-
-      {{- $PRList := list }}
-      {{- $changes := splitList " " $.Custom.PR }}
-      {{- range $pullrequest := $changes }}
-      {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $pullrequest }}
-      {{- $PRList = append $PRList $changeLink }}
-      {{- end -}}
-      - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}})
-    skipGlobalChoices: true
-    additionalChoices:
-      - key: Author
-        label: GitHub Username(s) (separated by a single space if multiple)
-        type: string
-        minLength: 3
-      - key: PR
-        label: GitHub Pull Request Number (separated by a single space if multiple)
-        type: string
-        minLength: 1
   - label: Security
-    changeFormat: |-
-      {{- $PRList := list }}
-      {{- $changes := splitList " " $.Custom.PR }}
-      {{- range $pullrequest := $changes }}
-      {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $pullrequest }}
-      {{- $PRList = append $PRList $changeLink }}
-      {{- end -}}
-      - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}})
-    skipGlobalChoices: true
-    additionalChoices:
-      - key: Author
-        label: GitHub Username(s) (separated by a single space if multiple)
-        type: string
-        minLength: 3
-      - key: PR
-        label: GitHub Pull Request Number (separated by a single space if multiple)
-        type: string
-        minLength: 1

 newlines:
   afterChangelogHeader: 1

@@ -106,18 +70,10 @@ footerFormat: |
     {{- $changeList := splitList " " $change.Custom.Author }}
     {{- $IssueList := list }}
-    {{- $changeLink := $change.Kind }}
-    {{- if or (eq $change.Kind "Dependencies") (eq $change.Kind "Security") }}
-    {{- $changes := splitList " " $change.Custom.PR }}
-    {{- range $issueNbr := $changes }}
-    {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $issueNbr }}
-    {{- $IssueList = append $IssueList $changeLink }}
-    {{- end -}}
-    {{- else }}
-    {{- $changes := splitList " " $change.Custom.Issue }}
-    {{- range $issueNbr := $changes }}
-    {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
-    {{- $IssueList = append $IssueList $changeLink }}
-    {{- end -}}
+    {{- $changes := splitList " " $change.Custom.Issue }}
+    {{- range $issueNbr := $changes }}
+    {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
+    {{- $IssueList = append $IssueList $changeLink }}
+    {{- end }}
     {{- /* check if this contributor has other changes associated with them already */}}
    {{- if hasKey $contributorDict $author }}
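The net effect of the consolidation above is that Dependencies and Security entries now use the same Issue-based link rendering as every other kind. A rough Python equivalent of what the shared changie `changeFormat` template produces (a hypothetical helper, for illustration only):

```python
def render_change(body: str, issues: str) -> str:
    # Split the space-separated issue numbers and render each as a
    # markdown link, the way the changie template above does.
    links = [
        f"[#{n}](https://github.com/dbt-labs/dbt-core/issues/{n})"
        for n in issues.split(" ")
    ]
    return f"- {body} ({', '.join(links)})"

print(render_change("Unit test path outputs", "9608"))
# - Unit test path outputs ([#9608](https://github.com/dbt-labs/dbt-core/issues/9608))
```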
2  .github/workflows/bot-changelog.yml  vendored

@@ -56,4 +56,4 @@ jobs:
       commit_message: "Add automated changelog yaml from template for bot PR"
       changie_kind: ${{ matrix.changie_kind }}
       label: ${{ matrix.label }}
-      custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n PR: ${{ github.event.pull_request.number }}"
+      custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n Issue: ${{ github.event.pull_request.number }}"
6  .github/workflows/community-label.yml  vendored

@@ -11,7 +11,8 @@
 name: Label community PRs

 on:
-  pull_request:
+  # have to use pull_request_target since community PRs come from forks
+  pull_request_target:
     types: [opened, ready_for_review]

 defaults:

@@ -20,6 +21,7 @@ defaults:

 permissions:
   pull-requests: write # labels PRs
+  contents: read # reads team membership

 jobs:
   open_issues:

@@ -32,6 +34,6 @@ jobs:
       github.event.action == 'ready_for_review' )
     uses: dbt-labs/actions/.github/workflows/label-community.yml@main
     with:
-      github_team: 'core'
+      github_team: 'core-group'
       label: 'community'
     secrets: inherit
26  .github/workflows/docs-issue.yml  vendored

@@ -1,19 +1,18 @@
 # **what?**
-# Open an issue in docs.getdbt.com when a PR is labeled `user docs`
+# Open an issue in docs.getdbt.com when an issue is labeled `user docs` and closed as completed

 # **why?**
 # To reduce barriers for keeping docs up to date

 # **when?**
-# When a PR is labeled `user docs` and is merged. Runs on pull_request_target to run off the workflow already merged,
-# not the workflow that existed on the PR branch. This allows old PRs to get comments.
+# When an issue is labeled `user docs` and is closed as completed. Can be labeled before or after the issue is closed.


-name: Open issues in docs.getdbt.com repo when a PR is labeled
-run-name: "Open an issue in docs.getdbt.com for PR #${{ github.event.pull_request.number }}"
+name: Open issues in docs.getdbt.com repo when an issue is labeled
+run-name: "Open an issue in docs.getdbt.com for issue #${{ github.event.issue.number }}"

 on:
-  pull_request_target:
+  issues:
     types: [labeled, closed]

 defaults:

@@ -21,23 +20,22 @@ defaults:
   shell: bash

 permissions:
-  issues: write # opens new issues
-  pull-requests: write # comments on PRs
-
+  issues: write # comments on issues

 jobs:
   open_issues:
-    # we only want to run this when the PR has been merged or the label in the labeled event is `user docs`. Otherwise it runs the
+    # we only want to run this when the issue is closed as completed and the label `user docs` has been assigned.
+    # If this logic does not exist in this workflow, it runs the
     # risk of duplicaton of issues being created due to merge and label both triggering this workflow to run and neither having
     # generating the comment before the other runs. This lives here instead of the shared workflow because this is where we
     # decide if it should run or not.
     if: |
-      (github.event.pull_request.merged == true) &&
-      ((github.event.action == 'closed' && contains( github.event.pull_request.labels.*.name, 'user docs')) ||
-      (github.event.action == 'labeled' && github.event.label.name == 'user docs'))
+      (github.event.issue.state == 'closed' &&
+      github.event.issue.state_reason == 'completed' &&
+      contains( github.event.issue.labels.*.name, 'user docs'))
     uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
     with:
       issue_repository: "dbt-labs/docs.getdbt.com"
-      issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} PR #${{ github.event.pull_request.number }}"
+      issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} Issue #${{ github.event.issue.number }}"
       issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated."
       secrets: inherit
2  .github/workflows/main.yml  vendored

@@ -288,7 +288,7 @@ jobs:
       - name: Install source distributions
         # ignore dbt-1.0.0, which intentionally raises an error when installed from source
         run: |
-          find ./dist/dbt-[a-z]*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
+          find ./dist/*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/

       - name: Check source distributions
         run: |
1  .github/workflows/nightly-release.yml  vendored

@@ -20,6 +20,7 @@ on:

 permissions:
   contents: write # this is the permission that allows creating a new release
+  packages: write # this is the permission that allows Docker release

 defaults:
   run:
100  .github/workflows/release-docker.yml  vendored

@@ -1,100 +0,0 @@
-# **what?**
-# This workflow will generate a series of docker images for dbt and push them to the github container registry
-#
-# **why?**
-# Docker images for dbt are used in a number of important places throughout the dbt ecosystem.
-# This is how we keep those images up-to-date.
-#
-# **when?**
-# This is triggered manually
-name: Docker release
-
-permissions:
-  packages: write
-
-on:
-  workflow_dispatch:
-    inputs:
-      package:
-        description: The package to release
-        type: choice
-        options:
-          - dbt-core
-          - dbt-bigquery
-          - dbt-postgres
-          - dbt-redshift
-          - dbt-snowflake
-          - dbt-spark
-        required: true
-      version_number:
-        description: The version number to release as a SemVer (e.g. 1.0.0b1, without `latest` or `v`)
-        required: true
-      dry_run:
-        description: Dry Run (don't publish)
-        type: boolean
-        default: false
-
-jobs:
-  version_metadata:
-    name: Get version metadata
-    runs-on: ubuntu-latest
-    outputs:
-      fully_qualified_tags: ${{ steps.tags.outputs.fully_qualified_tags }}
-    steps:
-      - name: Check out the repo
-        uses: actions/checkout@v4
-
-      - name: Get the tags to publish
-        id: tags
-        uses: ./.github/actions/latest-wrangler
-        with:
-          package_name: ${{ inputs.package }}
-          new_version: ${{ inputs.version_number }}
-          github_token: ${{ secrets.GITHUB_TOKEN }}
-
-  setup_image_builder:
-    name: Set up Docker image builder
-    runs-on: ubuntu-latest
-    needs: [version_metadata]
-    steps:
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-
-  build_and_push:
-    name: Build images and push to GHCR
-    runs-on: ubuntu-latest
-    needs: [setup_image_builder, version_metadata]
-    steps:
-      - name: Get docker build arg
-        id: build_arg
-        run: |
-          BUILD_ARG_NAME=$(echo ${{ inputs.package }} | sed 's/\-/_/g')
-          BUILD_ARG_VALUE=$(echo ${{ inputs.package }} | sed 's/postgres/core/g')
-          echo "name=$BUILD_ARG_NAME" >> $GITHUB_OUTPUT
-          echo "value=$BUILD_ARG_VALUE" >> $GITHUB_OUTPUT
-
-      - name: Log in to GHCR
-        uses: docker/login-action@v3
-        with:
-          registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Log publishing configuration
-        shell: bash
-        run: |
-          echo Package: ${{ inputs.package }}
-          echo Version: ${{ inputs.version_number }}
-          echo Tags: ${{ needs.version_metadata.outputs.fully_qualified_tags }}
-          echo Build Arg Name: ${{ steps.build_arg.outputs.name }}
-          echo Build Arg Value: ${{ steps.build_arg.outputs.value }}
-
-      - name: Build and push `${{ inputs.package }}`
-        if: ${{ !inputs.dry_run }}
-        uses: docker/build-push-action@v5
-        with:
-          file: docker/Dockerfile
-          push: True
-          target: ${{ inputs.package }}
-          build-args: ${{ steps.build_arg.outputs.name }}_ref=${{ steps.build_arg.outputs.value }}@v${{ inputs.version_number }}
-          tags: ${{ needs.version_metadata.outputs.fully_qualified_tags }}
63  .github/workflows/release.yml  vendored

@@ -7,6 +7,7 @@
 # - run unit and integration tests against given commit;
 # - build and package that SHA;
 # - release it to GitHub and PyPI with that specific build;
+# - release it to Docker
 #
 # **why?**
 # Ensure an automated and tested release process

@@ -14,7 +15,8 @@
 # **when?**
 # This workflow can be run manually on demand or can be called by other workflows

-name: Release to GitHub and PyPI
+name: "Release to GitHub, PyPI & Docker"
+run-name: "Release ${{ inputs.version_number }} to GitHub, PyPI & Docker"

 on:
   workflow_dispatch:

@@ -37,6 +39,11 @@ on:
         type: boolean
         default: false
         required: false
+      only_docker:
+        description: "Only release Docker image, skip GitHub & PyPI"
+        type: boolean
+        default: false
+        required: false
   workflow_call:
     inputs:
       target_branch:

@@ -79,6 +86,7 @@ jobs:
          echo The release version number: ${{ inputs.version_number }}
          echo Test run: ${{ inputs.test_run }}
          echo Nightly release: ${{ inputs.nightly_release }}
+         echo Only Docker: ${{ inputs.only_docker }}

      - name: "Checkout target branch"
        uses: actions/checkout@v4

@@ -97,6 +105,7 @@ jobs:
   bump-version-generate-changelog:
     name: Bump package version, Generate changelog
     needs: [job-setup]
+    if: ${{ !inputs.only_docker }}

     uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@main

@@ -112,7 +121,7 @@ jobs:

   log-outputs-bump-version-generate-changelog:
     name: "[Log output] Bump package version, Generate changelog"
-    if: ${{ !failure() && !cancelled() }}
+    if: ${{ !failure() && !cancelled() && !inputs.only_docker }}

     needs: [bump-version-generate-changelog]

@@ -126,7 +135,7 @@ jobs:

   build-test-package:
     name: Build, Test, Package
-    if: ${{ !failure() && !cancelled() }}
+    if: ${{ !failure() && !cancelled() && !inputs.only_docker }}
     needs: [job-setup, bump-version-generate-changelog]

     uses: dbt-labs/dbt-release/.github/workflows/build.yml@main

@@ -147,7 +156,7 @@ jobs:

   github-release:
     name: GitHub Release
-    if: ${{ !failure() && !cancelled() }}
+    if: ${{ !failure() && !cancelled() && !inputs.only_docker }}

     needs: [bump-version-generate-changelog, build-test-package]

@@ -174,6 +183,51 @@ jobs:
       PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
       TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }}

+  determine-docker-package:
+    # dbt-postgres exists within dbt-core for versions 1.7 and earlier but is a separate package for 1.8 and later.
+    # determine if we need to release dbt-core or both dbt-core and dbt-postgres
+    name: Determine Docker Package
+    if: ${{ !failure() && !cancelled() }}
+    runs-on: ubuntu-latest
+    needs: [pypi-release]
+    outputs:
+      matrix: ${{ steps.determine-docker-package.outputs.matrix }}
+    steps:
+      - name: "Audit Version And Parse Into Parts"
+        id: semver
+        uses: dbt-labs/actions/parse-semver@v1.1.0
+        with:
+          version: ${{ inputs.version_number }}
+
+      - name: "Determine Packages to Release"
+        id: determine-docker-package
+        run: |
+          if [ ${{ steps.semver.outputs.minor }} -ge 8 ]; then
+            json_output={\"package\":[\"dbt-core\"]}
+          else
+            json_output={\"package\":[\"dbt-core\",\"dbt-postgres\"]}
+          fi
+          echo "matrix=$json_output" >> $GITHUB_OUTPUT
+
+  docker-release:
+    name: "Docker Release for ${{ matrix.package }}"
+    needs: [determine-docker-package]
+    # We cannot release to docker on a test run because it uses the tag in GitHub as
+    # what we need to release but draft releases don't actually tag the commit so it
+    # finds nothing to release
+    if: ${{ !failure() && !cancelled() && (!inputs.test_run || inputs.only_docker) }}
+    strategy:
+      matrix: ${{fromJson(needs.determine-docker-package.outputs.matrix)}}
+
+    permissions:
+      packages: write
+
+    uses: dbt-labs/dbt-release/.github/workflows/release-docker.yml@main
+    with:
+      package: ${{ matrix.package }}
+      version_number: ${{ inputs.version_number }}
+      test_run: ${{ inputs.test_run }}

   slack-notification:
     name: Slack Notification
     if: ${{ failure() && (!inputs.test_run || inputs.nightly_release) }}

@@ -184,6 +238,7 @@ jobs:
       build-test-package,
       github-release,
       pypi-release,
+      docker-release,
     ]

   uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main
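The `determine-docker-package` job above encodes the dbt-postgres packaging split in a few lines of bash; the same decision, sketched in Python for clarity:

```python
import json

def docker_release_matrix(minor_version: int) -> str:
    # dbt-postgres ships inside dbt-core for 1.7 and earlier, but is a
    # separate package from 1.8 onward, so older releases publish both images.
    if minor_version >= 8:
        packages = ["dbt-core"]
    else:
        packages = ["dbt-core", "dbt-postgres"]
    return json.dumps({"package": packages})

print(docker_release_matrix(8))  # {"package": ["dbt-core"]}
print(docker_release_matrix(7))  # {"package": ["dbt-core", "dbt-postgres"]}
```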
56  CHANGELOG.md

@@ -5,6 +5,61 @@
 - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
 - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)

+## dbt-core 1.8.0-b2 - April 03, 2024
+
+### Features
+
+- Global config for --target and --profile CLI flags and DBT_TARGET and DBT_PROFILE environment variables. ([#7798](https://github.com/dbt-labs/dbt-core/issues/7798))
+- Allow excluding resource types for build, list, and clone commands, and provide env vars ([#9237](https://github.com/dbt-labs/dbt-core/issues/9237))
+- SourceDefinition.meta represents source-level and table-level meta properties, instead of only table-level ([#9766](https://github.com/dbt-labs/dbt-core/issues/9766))
+- Allow metrics in semantic layer filters. ([#9804](https://github.com/dbt-labs/dbt-core/issues/9804))
+
+### Fixes
+
+- fix lock-file bad indentation ([#9319](https://github.com/dbt-labs/dbt-core/issues/9319))
+- Tighten exception handling to avoid worker thread hangs. ([#9583](https://github.com/dbt-labs/dbt-core/issues/9583))
+- Do not add duplicate input_measures ([#9360](https://github.com/dbt-labs/dbt-core/issues/9360))
+- Throw a ParsingError if a primary key constraint is defined on multiple columns or at both the column and model level. ([#9581](https://github.com/dbt-labs/dbt-core/issues/9581))
+- Bug fix: don't parse Jinja in filters for input metrics or measures. ([#9582](https://github.com/dbt-labs/dbt-core/issues/9582))
+- Fix traceback parsing for exceptions raised due to csv fixtures moved into or out of fixture/subfolders. ([#9570](https://github.com/dbt-labs/dbt-core/issues/9570))
+- Fix partial parsing `KeyError` on deleted schema files ([#8860](https://github.com/dbt-labs/dbt-core/issues/8860))
+- Support saved queries in `dbt list` ([#9532](https://github.com/dbt-labs/dbt-core/issues/9532))
+- include sources in catalog.json when over 100 relations selected for catalog generation ([#9755](https://github.com/dbt-labs/dbt-core/issues/9755))
+- Support overriding macros in packages in unit testing ([#9624](https://github.com/dbt-labs/dbt-core/issues/9624))
+- Handle exceptions for failing on-run-* hooks in source freshness ([#9511](https://github.com/dbt-labs/dbt-core/issues/9511))
+- Validation of unit test parsing for incremental models ([#9593](https://github.com/dbt-labs/dbt-core/issues/9593))
+- Fix use of retry command on command using defer ([#9770](https://github.com/dbt-labs/dbt-core/issues/9770))
+- Make `args` variable to be un-modified by `dbt.invoke(args)` ([#8938](https://github.com/dbt-labs/dbt-core/issues/8938), [#9787](https://github.com/dbt-labs/dbt-core/issues/9787))
+- Unit test path outputs ([#9608](https://github.com/dbt-labs/dbt-core/issues/9608))
+- Fix assorted source freshness edgecases so check is run or actionable information is given ([#9078](https://github.com/dbt-labs/dbt-core/issues/9078))
+- "Fix Docker release process to account for both historical and current versions of `dbt-postgres` ([#9827](https://github.com/dbt-labs/dbt-core/issues/9827))
+
+### Docs
+
+- Add analytics for dbt.com ([dbt-docs/#430](https://github.com/dbt-labs/dbt-docs/issues/430))
+
+### Under the Hood
+
+- Remove unused key `wildcard` from MethodName enum ([#9641](https://github.com/dbt-labs/dbt-core/issues/9641))
+- Improve dbt CLI speed ([#4627](https://github.com/dbt-labs/dbt-core/issues/4627))
+- Include node_info in various Result events ([#9619](https://github.com/dbt-labs/dbt-core/issues/9619))
+
+### Dependencies
+
+- Bump actions/upload-artifact from 3 to 4 ([#9470](https://github.com/dbt-labs/dbt-core/pull/9470))
+- Restrict protobuf to 4.* versions ([#9566](https://github.com/dbt-labs/dbt-core/pull/9566))
+- Bump codecov/codecov-action from 3 to 4 ([#9659](https://github.com/dbt-labs/dbt-core/pull/9659))
+
+### Contributors
+- [@asweet](https://github.com/asweet) ([#9641](https://github.com/dbt-labs/dbt-core/issues/9641))
+- [@b-per](https://github.com/b-per) ([#430](https://github.com/dbt-labs/dbt-core/issues/430))
+- [@barton996](https://github.com/barton996) ([#7798](https://github.com/dbt-labs/dbt-core/issues/7798))
+- [@courtneyholcomb](https://github.com/courtneyholcomb) ([#9804](https://github.com/dbt-labs/dbt-core/issues/9804), [#9582](https://github.com/dbt-labs/dbt-core/issues/9582))
+- [@dwreeves](https://github.com/dwreeves) ([#4627](https://github.com/dbt-labs/dbt-core/issues/4627))
+- [@jx2lee](https://github.com/jx2lee) ([#9319](https://github.com/dbt-labs/dbt-core/issues/9319))
+- [@slothkong](https://github.com/slothkong) ([#9570](https://github.com/dbt-labs/dbt-core/issues/9570))
+
+
 ## dbt-core 1.8.0-b1 - February 28, 2024

 ### Breaking Changes

@@ -193,7 +248,6 @@
 - [@tlento](https://github.com/tlento) ([#9012](https://github.com/dbt-labs/dbt-core/pull/9012), [#9671](https://github.com/dbt-labs/dbt-core/pull/9671))
 - [@tonayya](https://github.com/tonayya) ([#9252](https://github.com/dbt-labs/dbt-core/issues/9252))

-
 ## Previous Releases

 For information on prior major and minor releases, see their changelogs:
@@ -170,9 +170,9 @@ Finally, you can also run a specific test or group of tests using [`pytest`](htt

 ```sh
 # run all unit tests in a file
-python3 -m pytest tests/unit/test_graph.py
+python3 -m pytest tests/unit/test_base_column.py
 # run a specific unit test
-python3 -m pytest tests/unit/test_graph.py::GraphTest::test__dependency_list
+python3 -m pytest tests/unit/test_base_column.py::TestNumericType::test__numeric_type
 # run specific Postgres functional tests
 python3 -m pytest tests/functional/sources
 ```
1  core/dbt/artifacts/exceptions/__init__.py  Normal file

@@ -0,0 +1 @@
+from dbt.artifacts.exceptions.schemas import IncompatibleSchemaError
31  core/dbt/artifacts/exceptions/schemas.py  Normal file

@@ -0,0 +1,31 @@
+from typing import Optional
+
+from dbt_common.exceptions import DbtRuntimeError
+
+
+class IncompatibleSchemaError(DbtRuntimeError):
+    def __init__(self, expected: str, found: Optional[str] = None) -> None:
+        self.expected = expected
+        self.found = found
+        self.filename = "input file"
+
+        super().__init__(msg=self.get_message())
+
+    def add_filename(self, filename: str):
+        self.filename = filename
+        self.msg = self.get_message()
+
+    def get_message(self) -> str:
+        found_str = "nothing"
+        if self.found is not None:
+            found_str = f'"{self.found}"'
+
+        msg = (
+            f'Expected a schema version of "{self.expected}" in '
+            f"{self.filename}, but found {found_str}. Are you running with a "
+            f"different version of dbt?"
+        )
+        return msg
+
+    CODE = 10014
+    MESSAGE = "Incompatible Schema"
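A minimal sketch of how the new exception composes its message; `add_filename` lets callers attach the artifact path after construction (the version strings and path here are made-up examples):

```python
from dbt.artifacts.exceptions import IncompatibleSchemaError

# Example values only; real schema version identifiers come from the
# artifact being loaded.
err = IncompatibleSchemaError(expected="manifest/v12", found="manifest/v10")
print(err.msg)  # mentions "input file", the default filename

err.add_filename("target/manifest.json")
print(err.msg)
# Expected a schema version of "manifest/v12" in target/manifest.json, but
# found "manifest/v10". Are you running with a different version of dbt?
```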
@@ -11,7 +11,7 @@ from dbt_common.contracts.config.metadata import Metadata, ShowBehavior
 from dbt_common.contracts.config.materialization import OnConfigurationChangeOption
 from dbt.artifacts.resources.base import Docs
 from dbt.artifacts.resources.types import ModelHookType
-from dbt.contracts.graph.utils import validate_color
+from dbt.artifacts.utils.validation import validate_color
 from dbt import hooks
 from mashumaro.jsonschema.annotations import Pattern


@@ -42,7 +42,7 @@ class NodeRelation(dbtClassMixin):
     alias: str
     schema_name: str # TODO: Could this be called simply "schema" so we could reuse StateRelation?
     database: Optional[str] = None
-    relation_name: Optional[str] = None
+    relation_name: Optional[str] = ""


 # ====================================

@@ -30,6 +30,7 @@ class UnitTestConfig(BaseConfig):
 class UnitTestFormat(StrEnum):
     CSV = "csv"
     Dict = "dict"
+    SQL = "sql"


 @dataclass
@@ -1,22 +1,18 @@
 import dataclasses
 from datetime import datetime
+import functools
+from mashumaro.jsonschema import build_json_schema
+from mashumaro.jsonschema.dialects import DRAFT_2020_12
 from typing import ClassVar, Type, TypeVar, Dict, Any, Optional

 from dbt_common.clients.system import write_json, read_json
-from dbt.exceptions import (
-    DbtInternalError,
-    DbtRuntimeError,
-    IncompatibleSchemaError,
-)
-from dbt.version import __version__
+from dbt_common.exceptions import DbtInternalError, DbtRuntimeError
 from dbt_common.events.functions import get_metadata_vars
 from dbt_common.invocation import get_invocation_id
 from dbt_common.dataclass_schema import dbtClassMixin

-from mashumaro.jsonschema import build_json_schema
-from mashumaro.jsonschema.dialects import DRAFT_2020_12
-import functools
+from dbt.version import __version__
+from dbt.artifacts.exceptions import IncompatibleSchemaError


 BASE_SCHEMAS_URL = "https://schemas.getdbt.com/"
@@ -1,9 +1,5 @@
 from dbt.contracts.graph.nodes import ResultNode
-from dbt_common.events.functions import fire_event
-from dbt.events.types import TimingInfoCollected
-from dbt_common.events.contextvars import get_node_info
 from dbt_common.events.helpers import datetime_to_json_string
-from dbt.logger import TimingProcessor
 from dbt_common.utils import cast_to_str, cast_to_int
 from dbt_common.dataclass_schema import dbtClassMixin, StrEnum

@@ -45,13 +41,6 @@ class collect_timing_info:
     def __exit__(self, exc_type, exc_value, traceback):
         self.timing_info.end()
         self.callback(self.timing_info)
-        # Note: when legacy logger is removed, we can remove the following line
-        with TimingProcessor(self.timing_info):
-            fire_event(
-                TimingInfoCollected(
-                    timing_info=self.timing_info.to_msg_dict(), node_info=get_node_info()
-                )
-            )


 class RunningStatus(StrEnum):
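After this change, `collect_timing_info` hands the finished timing record to a caller-supplied callback instead of firing an event from inside `dbt/artifacts`. A self-contained sketch of that callback shape (simplified types, not the real dbt classes):

```python
from dataclasses import dataclass
from datetime import datetime
from typing import Callable, Optional

@dataclass
class TimingInfo:
    name: str
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None

class collect_timing_info:
    def __init__(self, name: str, callback: Callable[[TimingInfo], None]):
        self.timing_info = TimingInfo(name)
        self.callback = callback

    def __enter__(self):
        self.timing_info.started_at = datetime.now()
        return self.timing_info

    def __exit__(self, exc_type, exc_value, traceback):
        self.timing_info.completed_at = datetime.now()
        # Event firing now happens in the caller's callback, keeping
        # dbt/artifacts free of dbt.events imports.
        self.callback(self.timing_info)

with collect_timing_info("compile", print):
    pass  # timed work goes here
```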
@@ -1,10 +1,12 @@
 import threading
 from typing import Any, Optional, Iterable, Tuple, Sequence, Dict, TYPE_CHECKING
+import copy
 from dataclasses import dataclass, field
 from datetime import datetime


-from dbt.contracts.graph.nodes import CompiledNode
+from dbt.constants import SECRET_ENV_PREFIX
+from dbt.artifacts.resources import CompiledResource
 from dbt.artifacts.schemas.base import (
     BaseArtifactMetadata,
     ArtifactMixin,

@@ -19,6 +21,7 @@ from dbt.artifacts.schemas.results import (
     ExecutionResult,
 )
 from dbt_common.clients.system import write_json
+from dbt.exceptions import scrub_secrets


 if TYPE_CHECKING:

@@ -67,7 +70,7 @@ class RunResultOutput(BaseResult):

 def process_run_result(result: RunResult) -> RunResultOutput:

-    compiled = isinstance(result.node, CompiledNode)
+    compiled = isinstance(result.node, CompiledResource)

     return RunResultOutput(
         unique_id=result.node.unique_id,

@@ -123,7 +126,26 @@ class RunResultsArtifact(ExecutionResult, ArtifactMixin):
             dbt_schema_version=str(cls.dbt_schema_version),
             generated_at=generated_at,
         )
-        return cls(metadata=meta, results=processed_results, elapsed_time=elapsed_time, args=args)
+
+        secret_vars = [
+            v for k, v in args["vars"].items() if k.startswith(SECRET_ENV_PREFIX) and v.strip()
+        ]
+
+        scrubbed_args = copy.deepcopy(args)
+
+        # scrub secrets in invocation command
+        scrubbed_args["invocation_command"] = scrub_secrets(
+            scrubbed_args["invocation_command"], secret_vars
+        )
+
+        # scrub secrets in vars dict
+        scrubbed_args["vars"] = {
+            k: scrub_secrets(v, secret_vars) for k, v in scrubbed_args["vars"].items()
+        }
+
+        return cls(
+            metadata=meta, results=processed_results, elapsed_time=elapsed_time, args=scrubbed_args
+        )

     @classmethod
     def compatible_previous_versions(cls) -> Iterable[Tuple[str, int]]:
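The scrubbing added above removes the values of `DBT_ENV_SECRET_`-prefixed vars from the written run results. A standalone sketch of the same idea, using a simple stand-in for `scrub_secrets` (dbt's own replacement token may differ):

```python
import copy

SECRET_ENV_PREFIX = "DBT_ENV_SECRET_"  # the prefix checked above

def scrub(value: str, secrets: list) -> str:
    # Stand-in for scrub_secrets: replace each secret value with a placeholder.
    for secret in secrets:
        value = value.replace(secret, "*****")
    return value

args = {
    "invocation_command": "dbt run --vars '{DBT_ENV_SECRET_TOKEN: hunter2}'",
    "vars": {"DBT_ENV_SECRET_TOKEN": "hunter2", "target": "prod"},
}

secret_vars = [
    v for k, v in args["vars"].items() if k.startswith(SECRET_ENV_PREFIX) and v.strip()
]
scrubbed_args = copy.deepcopy(args)
scrubbed_args["invocation_command"] = scrub(scrubbed_args["invocation_command"], secret_vars)
scrubbed_args["vars"] = {k: scrub(v, secret_vars) for k, v in scrubbed_args["vars"].items()}

print(scrubbed_args["invocation_command"])  # the token value is now *****
```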
@@ -352,6 +352,11 @@ class Flags:
         if getattr(self, "MACRO_DEBUGGING", None) is not None:
             jinja.MACRO_DEBUGGING = getattr(self, "MACRO_DEBUGGING")

+    # This is here to prevent mypy from complaining about all of the
+    # attributes which we added dynamically.
+    def __getattr__(self, name: str) -> Any:
+        return super().__getattribute__(name)  # type: ignore
+

 CommandParams = List[str]
@@ -54,7 +54,7 @@ class dbtRunner:

     def invoke(self, args: List[str], **kwargs) -> dbtRunnerResult:
         try:
-            dbt_ctx = cli.make_context(cli.name, args)
+            dbt_ctx = cli.make_context(cli.name, args.copy())
             dbt_ctx.obj = {
                 "manifest": self.manifest,
                 "callbacks": self.callbacks,
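Click's `make_context` consumes the argument list it is handed while parsing, which is why a caller-supplied `args` came back modified before this fix. A minimal illustration of the difference (plain Python, not dbt's actual call path):

```python
def parse(argv: list) -> list:
    # Stand-in for a parser that destructively consumes its input, the
    # way Click pops arguments off the list it is given.
    consumed = []
    while argv:
        consumed.append(argv.pop(0))
    return consumed

args = ["run", "--select", "my_model"]
parse(args.copy())  # parse a copy, as dbtRunner.invoke now does
print(args)         # ['run', '--select', 'my_model'] -- caller's list intact
```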
@@ -399,7 +399,6 @@ def debug(ctx, **kwargs):

     task = DebugTask(
         ctx.obj["flags"],
-        None,
     )

     results = task.run()

@@ -464,7 +463,7 @@ def init(ctx, **kwargs):
     """Initialize a new dbt project."""
     from dbt.task.init import InitTask

-    task = InitTask(ctx.obj["flags"], None)
+    task = InitTask(ctx.obj["flags"])

     results = task.run()
     success = task.interpret_results(results)
@@ -274,7 +274,6 @@ class Compiler:

     def initialize(self):
         make_directory(self.config.project_target_path)
-        make_directory(self.config.packages_install_path)

         # creates a ModelContext which is converted to
         # a dict for jinja rendering of SQL
@@ -36,6 +36,7 @@ def insensitive_patterns(*patterns: str):
 @dataclass
 class UnitTestNodeConfig(NodeConfig):
     expected_rows: List[Dict[str, Any]] = field(default_factory=list)
+    expected_sql: Optional[str] = None


 @dataclass
@@ -954,16 +954,6 @@ class UnitTestDefinition(NodeInfoMixin, GraphNode, UnitTestDefinitionResource):
     def resource_class(cls) -> Type[UnitTestDefinitionResource]:
         return UnitTestDefinitionResource

-    @property
-    def build_path(self):
-        # TODO: is this actually necessary?
-        return self.original_file_path
-
-    @property
-    def compiled_path(self):
-        # TODO: is this actually necessary?
-        return self.original_file_path
-
     @property
     def depends_on_nodes(self):
         return self.depends_on.nodes

@@ -1001,7 +991,7 @@ class UnitTestDefinition(NodeInfoMixin, GraphNode, UnitTestDefinitionResource):
 @dataclass
 class UnitTestFileFixture(BaseNode):
     resource_type: Literal[NodeType.Fixture]
-    rows: Optional[List[Dict[str, Any]]] = None
+    rows: Optional[Union[List[Dict[str, Any]], str]] = None


 # ====================================
@@ -296,6 +296,7 @@ class Project(dbtClassMixin):

 @dataclass
 class ProjectFlags(ExtensibleDbtClassMixin):
+    allow_spaces_in_model_names: Optional[bool] = True
     cache_selected_only: Optional[bool] = None
     debug: Optional[bool] = None
     fail_fast: Optional[bool] = None

@@ -320,7 +321,10 @@ class ProjectFlags(ExtensibleDbtClassMixin):

     @property
     def project_only_flags(self) -> Dict[str, Any]:
-        return {"source_freshness_run_project_hooks": self.source_freshness_run_project_hooks}
+        return {
+            "source_freshness_run_project_hooks": self.source_freshness_run_project_hooks,
+            "allow_spaces_in_model_names": self.allow_spaces_in_model_names,
+        }


 @dataclass
@@ -2,12 +2,12 @@ from pathlib import Path
 from typing import Optional

 from dbt.contracts.graph.manifest import Manifest
+from dbt.artifacts.exceptions import IncompatibleSchemaError
 from dbt.artifacts.schemas.manifest import WritableManifest
 from dbt.artifacts.schemas.freshness import FreshnessExecutionResultArtifact
 from dbt.artifacts.schemas.run import RunResultsArtifact
 from dbt_common.events.functions import fire_event
 from dbt.events.types import WarnStateTargetEqual
-from dbt.exceptions import IncompatibleSchemaError


 def load_result_state(results_path) -> Optional[RunResultsArtifact]:
@@ -1,5 +1,5 @@
 from dataclasses import dataclass, field
-from typing import Dict, List, NoReturn, Union, Type, Iterator, Set, Any
+from typing import Dict, List, NoReturn, Type, Iterator, Set, Any

 from dbt.exceptions import (
     DuplicateDependencyToRootError,

@@ -17,14 +17,13 @@ from dbt.deps.git import GitUnpinnedPackage
 from dbt.deps.registry import RegistryUnpinnedPackage

 from dbt.contracts.project import (
+    PackageSpec,
     LocalPackage,
     TarballPackage,
     GitPackage,
     RegistryPackage,
 )

-PackageContract = Union[LocalPackage, TarballPackage, GitPackage, RegistryPackage]
-

 @dataclass
 class PackageListing:

@@ -68,7 +67,7 @@ class PackageListing:
         else:
             self.packages[key] = package

-    def update_from(self, src: List[PackageContract]) -> None:
+    def update_from(self, src: List[PackageSpec]) -> None:
         pkg: UnpinnedPackage
         for contract in src:
             if isinstance(contract, LocalPackage):

@@ -84,9 +83,7 @@ class PackageListing:
             self.incorporate(pkg)

     @classmethod
-    def from_contracts(
-        cls: Type["PackageListing"], src: List[PackageContract]
-    ) -> "PackageListing":
+    def from_contracts(cls: Type["PackageListing"], src: List[PackageSpec]) -> "PackageListing":
         self = cls({})
         self.update_from(src)
         return self

@@ -114,7 +111,7 @@ def _check_for_duplicate_project_names(


 def resolve_packages(
-    packages: List[PackageContract],
+    packages: List[PackageSpec],
     project: Project,
     cli_vars: Dict[str, Any],
 ) -> List[PinnedPackage]:

@@ -137,7 +134,7 @@ def resolve_packages(
     return resolved


-def resolve_lock_packages(packages: List[PackageContract]) -> List[PinnedPackage]:
+def resolve_lock_packages(packages: List[PackageSpec]) -> List[PinnedPackage]:
     lock_packages = PackageListing.from_contracts(packages)
     final = PackageListing()
@@ -403,6 +403,30 @@ message ProjectFlagsMovedDeprecationMsg {
   ProjectFlagsMovedDeprecation data = 2;
 }

+// D014
+message SpacesInModelNameDeprecation {
+  string model_name = 1;
+  string model_version = 2;
+  string level = 3;
+}
+
+message SpacesInModelNameDeprecationMsg {
+  CoreEventInfo info = 1;
+  SpacesInModelNameDeprecation data = 2;
+}
+
+// D015
+message TotalModelNamesWithSpacesDeprecation {
+  int32 count_invalid_names = 1;
+  bool show_debug_hint = 2;
+  string level = 3;
+}
+
+message TotalModelNamesWithSpacesDeprecationMsg {
+  CoreEventInfo info = 1;
+  TotalModelNamesWithSpacesDeprecation data = 2;
+}
+
 // I065
 message DeprecatedModel {
   string model_name = 1;

@@ -1772,6 +1796,7 @@ message RunResultWarning {
   string resource_type = 1;
   string node_name = 2;
   string path = 3;
+  NodeInfo node_info = 4;
 }

 message RunResultWarningMsg {

@@ -1784,6 +1809,7 @@ message RunResultFailure {
   string resource_type = 1;
   string node_name = 2;
   string path = 3;
+  NodeInfo node_info = 4;
 }

 message RunResultFailureMsg {

@@ -1804,6 +1830,7 @@ message StatsLineMsg {
 // Z024
 message RunResultError {
   string msg = 1;
+  NodeInfo node_info = 2;
 }

 message RunResultErrorMsg {

@@ -1814,6 +1841,7 @@ message RunResultErrorMsg {
 // Z025
 message RunResultErrorNoMessage {
   string status = 1;
+  NodeInfo node_info = 2;
 }

 message RunResultErrorNoMessageMsg {

@@ -1824,6 +1852,7 @@ message RunResultErrorNoMessageMsg {
 // Z026
 message SQLCompiledPath {
   string path = 1;
+  NodeInfo node_info = 2;
 }

 message SQLCompiledPathMsg {

@@ -1834,6 +1863,7 @@ message SQLCompiledPathMsg {
 // Z027
 message CheckNodeTestFailure {
   string relation_name = 1;
+  NodeInfo node_info = 2;
 }

 message CheckNodeTestFailureMsg {

@@ -1958,6 +1988,7 @@ message TrackingInitializeFailureMsg {
 // Z046
 message RunResultWarningMessage {
   string msg = 1;
+  NodeInfo node_info = 2;
 }

 message RunResultWarningMessageMsg {
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
import json

from dbt.constants import MAXIMUM_SEED_SIZE_NAME, PIN_PACKAGE_URL
from dbt_common.ui import warning_tag, line_wrap_message, green, yellow, red
from dbt_common.ui import error_tag, warning_tag, line_wrap_message, green, yellow, red
from dbt_common.events.base_types import EventLevel
from dbt_common.events.format import (
    format_fancy_output_line,
@@ -395,7 +395,7 @@ class TestsConfigDeprecation(WarnLevel):
    def message(self) -> str:
        description = (
            f"The `{self.deprecated_path}` config has been renamed to `{self.exp_path}`. "
            "Please update your `dbt_project.yml` configuration to reflect this change."
            "Please see https://docs.getdbt.com/docs/build/data-tests#new-data_tests-syntax for more information."
        )
        return line_wrap_message(warning_tag(f"Deprecated functionality\n\n{description}"))

@@ -413,6 +413,43 @@ class ProjectFlagsMovedDeprecation(WarnLevel):
        return warning_tag(f"Deprecated functionality\n\n{description}")


class SpacesInModelNameDeprecation(DynamicLevel):
    def code(self) -> str:
        return "D014"

    def message(self) -> str:
        version = ".v" + self.model_version if self.model_version else ""
        description = (
            f"Model `{self.model_name}{version}` has spaces in its name. This is deprecated and "
            "may cause errors when using dbt."
        )

        if self.level == EventLevel.ERROR.value:
            description = error_tag(description)
        elif self.level == EventLevel.WARN.value:
            description = warning_tag(description)

        return line_wrap_message(description)


class TotalModelNamesWithSpacesDeprecation(DynamicLevel):
    def code(self) -> str:
        return "D015"

    def message(self) -> str:
        description = f"Spaces in model names found in {self.count_invalid_names} model(s), which is deprecated."

        if self.show_debug_hint:
            description += " Run again with `--debug` to see them all."

        if self.level == EventLevel.ERROR.value:
            description = error_tag(description)
        elif self.level == EventLevel.WARN.value:
            description = warning_tag(description)

        return line_wrap_message(description)


# =======================================================
# I - Project parsing
# =======================================================
@@ -1099,7 +1136,7 @@ class NoNodesForSelectionCriteria(WarnLevel):
        return "M030"

    def message(self) -> str:
        return f"The selection criterion '{self.spec_raw}' does not match any nodes"
        return f"The selection criterion '{self.spec_raw}' does not match any enabled nodes"


class DepsLockUpdating(InfoLevel):
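
Both new deprecation events subclass DynamicLevel rather than WarnLevel, so severity is supplied by the caller at fire time and the message() body picks error_tag or warning_tag accordingly. A minimal sketch of firing one, mirroring the call sites added in dbt/parser/manifest.py later in this diff (the model name here is hypothetical):

from dbt_common.events.base_types import EventLevel
from dbt_common.events.functions import fire_event
from dbt.events.types import SpacesInModelNameDeprecation

# severity is chosen by the caller, not baked into the event class
level = EventLevel.WARN  # or EventLevel.ERROR when spaces are disallowed
fire_event(
    SpacesInModelNameDeprecation(
        model_name="my model",  # hypothetical model name containing a space
        model_version="",
        level=level.value,  # message() uses this to pick error_tag vs warning_tag
    ),
    level=level,
)
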
@@ -17,6 +17,8 @@ from dbt.node_types import NodeType, AccessType, REFABLE_NODE_TYPES

from dbt_common.dataclass_schema import ValidationError

from dbt.constants import SECRET_ENV_PREFIX


if TYPE_CHECKING:
    import agate
@@ -77,34 +79,6 @@ class JSONValidationError(DbtValidationError):
        return (JSONValidationError, (self.typename, self.errors))


class IncompatibleSchemaError(DbtRuntimeError):
    def __init__(self, expected: str, found: Optional[str] = None) -> None:
        self.expected = expected
        self.found = found
        self.filename = "input file"

        super().__init__(msg=self.get_message())

    def add_filename(self, filename: str):
        self.filename = filename
        self.msg = self.get_message()

    def get_message(self) -> str:
        found_str = "nothing"
        if self.found is not None:
            found_str = f'"{self.found}"'

        msg = (
            f'Expected a schema version of "{self.expected}" in '
            f"{self.filename}, but found {found_str}. Are you running with a "
            f"different version of dbt?"
        )
        return msg

    CODE = 10014
    MESSAGE = "Incompatible Schema"


class AliasError(DbtValidationError):
    pass

@@ -361,7 +335,10 @@ class RequiredVarNotFoundError(CompilationError):
        pretty_vars = json.dumps(dct, sort_keys=True, indent=4)

        msg = f"Required var '{self.var_name}' not found in config:\nVars supplied to {node_name} = {pretty_vars}"
        return msg
        return scrub_secrets(msg, self.var_secrets())

    def var_secrets(self) -> List[str]:
        return [v for k, v in self.merged.items() if k.startswith(SECRET_ENV_PREFIX) and v.strip()]
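
The new var_secrets helper collects the values of vars named with the SECRET_ENV_PREFIX so the error text can be scrubbed before display. scrub_secrets itself is defined elsewhere in dbt; a rough sketch of what the scrubbing amounts to, with the "*****" marker assumed:

from typing import List

def scrub_secrets(msg: str, secrets: List[str]) -> str:
    # replace each secret value in the message with a redaction marker (marker assumed)
    for secret in secrets:
        msg = msg.replace(secret, "*****")
    return msg

# e.g. scrub_secrets("token=abc123", ["abc123"]) -> "token=*****"
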

class PackageNotFoundForMacroError(CompilationError):
@@ -54,7 +54,6 @@ class MethodName(StrEnum):
    Metric = "metric"
    Result = "result"
    SourceStatus = "source_status"
    Wildcard = "wildcard"
    Version = "version"
    SemanticModel = "semantic_model"
    SavedQuery = "saved_query"
@@ -258,37 +257,36 @@ class QualifiedNameSelectorMethod(SelectorMethod):
        :param str selector: The selector or node name
        """
        non_source_nodes = list(self.non_source_nodes(included_nodes))
        for node, real_node in non_source_nodes:
            if self.node_is_match(selector, real_node.fqn, real_node.is_versioned):
                yield node
        for unique_id, node in non_source_nodes:
            if self.node_is_match(selector, node.fqn, node.is_versioned):
                yield unique_id


class TagSelectorMethod(SelectorMethod):
    def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]:
        """yields nodes from included that have the specified tag"""
        for node, real_node in self.all_nodes(included_nodes):
            if hasattr(real_node, "tags") and any(
                fnmatch(tag, selector) for tag in real_node.tags
            ):
                yield node
        for unique_id, node in self.all_nodes(included_nodes):
            if hasattr(node, "tags") and any(fnmatch(tag, selector) for tag in node.tags):
                yield unique_id


class GroupSelectorMethod(SelectorMethod):
    def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]:
        """yields nodes from included in the specified group"""
        for node, real_node in self.groupable_nodes(included_nodes):
            if selector == real_node.config.get("group"):
                yield node
        for unique_id, node in self.groupable_nodes(included_nodes):
            node_group = node.config.get("group")
            if node_group and fnmatch(node_group, selector):
                yield unique_id
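
The GroupSelectorMethod change swaps strict equality for fnmatch, so group selectors now accept shell-style wildcards like the other selector methods. For example:

from fnmatch import fnmatch

# exact names still match, and wildcards now work too
assert fnmatch("finance", "finance")
assert fnmatch("finance_reporting", "finance*")
assert not fnmatch("marketing", "finance*")
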

class AccessSelectorMethod(SelectorMethod):
    def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]:
        """yields model nodes matching the specified access level"""
        for node, real_node in self.parsed_nodes(included_nodes):
            if not isinstance(real_node, ModelNode):
        for unique_id, node in self.parsed_nodes(included_nodes):
            if not isinstance(node, ModelNode):
                continue
            if selector == real_node.access:
                yield node
            if selector == node.access:
                yield unique_id


class SourceSelectorMethod(SelectorMethod):
@@ -311,14 +309,14 @@ class SourceSelectorMethod(SelectorMethod):
            ).format(selector)
            raise DbtRuntimeError(msg)

        for node, real_node in self.source_nodes(included_nodes):
            if not fnmatch(real_node.package_name, target_package):
        for unique_id, node in self.source_nodes(included_nodes):
            if not fnmatch(node.package_name, target_package):
                continue
            if not fnmatch(real_node.source_name, target_source):
            if not fnmatch(node.source_name, target_source):
                continue
            if not fnmatch(real_node.name, target_table):
            if not fnmatch(node.name, target_table):
                continue
            yield node
            yield unique_id


class ExposureSelectorMethod(SelectorMethod):
@@ -337,13 +335,13 @@ class ExposureSelectorMethod(SelectorMethod):
            ).format(selector)
            raise DbtRuntimeError(msg)

        for node, real_node in self.exposure_nodes(included_nodes):
            if not fnmatch(real_node.package_name, target_package):
        for unique_id, node in self.exposure_nodes(included_nodes):
            if not fnmatch(node.package_name, target_package):
                continue
            if not fnmatch(real_node.name, target_name):
            if not fnmatch(node.name, target_name):
                continue

            yield node
            yield unique_id


class MetricSelectorMethod(SelectorMethod):
@@ -362,13 +360,13 @@ class MetricSelectorMethod(SelectorMethod):
            ).format(selector)
            raise DbtRuntimeError(msg)

        for node, real_node in self.metric_nodes(included_nodes):
            if not fnmatch(real_node.package_name, target_package):
        for unique_id, node in self.metric_nodes(included_nodes):
            if not fnmatch(node.package_name, target_package):
                continue
            if not fnmatch(real_node.name, target_name):
            if not fnmatch(node.name, target_name):
                continue

            yield node
            yield unique_id


class SemanticModelSelectorMethod(SelectorMethod):
@@ -387,13 +385,13 @@ class SemanticModelSelectorMethod(SelectorMethod):
            ).format(selector)
            raise DbtRuntimeError(msg)

        for node, real_node in self.semantic_model_nodes(included_nodes):
            if not fnmatch(real_node.package_name, target_package):
        for unique_id, node in self.semantic_model_nodes(included_nodes):
            if not fnmatch(node.package_name, target_package):
                continue
            if not fnmatch(real_node.name, target_name):
            if not fnmatch(node.name, target_name):
                continue

            yield node
            yield unique_id


class SavedQuerySelectorMethod(SelectorMethod):
@@ -412,13 +410,13 @@ class SavedQuerySelectorMethod(SelectorMethod):
            ).format(selector)
            raise DbtRuntimeError(msg)

        for node, real_node in self.saved_query_nodes(included_nodes):
            if not fnmatch(real_node.package_name, target_package):
        for unique_id, node in self.saved_query_nodes(included_nodes):
            if not fnmatch(node.package_name, target_package):
                continue
            if not fnmatch(real_node.name, target_name):
            if not fnmatch(node.name, target_name):
                continue

            yield node
            yield unique_id


class PathSelectorMethod(SelectorMethod):
@@ -431,35 +429,35 @@ class PathSelectorMethod(SelectorMethod):
        else:
            root = Path.cwd()
        paths = set(p.relative_to(root) for p in root.glob(selector))
        for node, real_node in self.all_nodes(included_nodes):
            ofp = Path(real_node.original_file_path)
        for unique_id, node in self.all_nodes(included_nodes):
            ofp = Path(node.original_file_path)
            if ofp in paths:
                yield node
            if hasattr(real_node, "patch_path") and real_node.patch_path:  # type: ignore
                pfp = real_node.patch_path.split("://")[1]  # type: ignore
                yield unique_id
            if hasattr(node, "patch_path") and node.patch_path:  # type: ignore
                pfp = node.patch_path.split("://")[1]  # type: ignore
                ymlfp = Path(pfp)
                if ymlfp in paths:
                    yield node
                    yield unique_id
            if any(parent in paths for parent in ofp.parents):
                yield node
                yield unique_id


class FileSelectorMethod(SelectorMethod):
    def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]:
        """Yields nodes from included that match the given file name."""
        for node, real_node in self.all_nodes(included_nodes):
            if fnmatch(Path(real_node.original_file_path).name, selector):
                yield node
            elif fnmatch(Path(real_node.original_file_path).stem, selector):
                yield node
        for unique_id, node in self.all_nodes(included_nodes):
            if fnmatch(Path(node.original_file_path).name, selector):
                yield unique_id
            elif fnmatch(Path(node.original_file_path).stem, selector):
                yield unique_id


class PackageSelectorMethod(SelectorMethod):
    def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]:
        """Yields nodes from included that have the specified package"""
        for node, real_node in self.all_nodes(included_nodes):
            if fnmatch(real_node.package_name, selector):
                yield node
        for unique_id, node in self.all_nodes(included_nodes):
            if fnmatch(node.package_name, selector):
                yield unique_id


def _getattr_descend(obj: Any, attrs: List[str]) -> Any:
@@ -501,9 +499,9 @@ class ConfigSelectorMethod(SelectorMethod):
        # search sources is kind of useless now source configs only have
        # 'enabled', which you can't really filter on anyway, but maybe we'll
        # add more someday, so search them anyway.
        for node, real_node in self.configurable_nodes(included_nodes):
        for unique_id, node in self.configurable_nodes(included_nodes):
            try:
                value = _getattr_descend(real_node.config, parts)
                value = _getattr_descend(node.config, parts)
            except AttributeError:
                continue
            else:
@@ -513,7 +511,7 @@ class ConfigSelectorMethod(SelectorMethod):
                    or (CaseInsensitive(selector) == "true" and True in value)
                    or (CaseInsensitive(selector) == "false" and False in value)
                ):
                    yield node
                    yield unique_id
                else:
                    if (
                        (selector == value)
@@ -521,7 +519,7 @@ class ConfigSelectorMethod(SelectorMethod):
                        or (CaseInsensitive(selector) == "false")
                        and value is False
                    ):
                        yield node
                        yield unique_id


class ResourceTypeSelectorMethod(SelectorMethod):
@@ -530,9 +528,9 @@ class ResourceTypeSelectorMethod(SelectorMethod):
            resource_type = NodeType(selector)
        except ValueError as exc:
            raise DbtRuntimeError(f'Invalid resource_type selector "{selector}"') from exc
        for node, real_node in self.all_nodes(included_nodes):
            if real_node.resource_type == resource_type:
                yield node
        for unique_id, node in self.all_nodes(included_nodes):
            if node.resource_type == resource_type:
                yield unique_id


class TestNameSelectorMethod(SelectorMethod):
@@ -762,9 +760,9 @@ class ResultSelectorMethod(SelectorMethod):
        matches = set(
            result.unique_id for result in self.previous_state.results if result.status == selector
        )
        for node, real_node in self.all_nodes(included_nodes):
            if node in matches:
                yield node
        for unique_id, node in self.all_nodes(included_nodes):
            if unique_id in matches:
                yield unique_id


class SourceStatusSelectorMethod(SelectorMethod):
@@ -816,37 +814,37 @@ class SourceStatusSelectorMethod(SelectorMethod):
        ):
            matches.remove(unique_id)

        for node, real_node in self.all_nodes(included_nodes):
            if node in matches:
                yield node
        for unique_id, node in self.all_nodes(included_nodes):
            if unique_id in matches:
                yield unique_id


class VersionSelectorMethod(SelectorMethod):
    def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]:
        for node, real_node in self.parsed_nodes(included_nodes):
            if isinstance(real_node, ModelNode):
        for unique_id, node in self.parsed_nodes(included_nodes):
            if isinstance(node, ModelNode):
                if selector == "latest":
                    if real_node.is_latest_version:
                        yield node
                    if node.is_latest_version:
                        yield unique_id
                elif selector == "prerelease":
                    if (
                        real_node.version
                        and real_node.latest_version
                        and UnparsedVersion(v=real_node.version)
                        > UnparsedVersion(v=real_node.latest_version)
                        node.version
                        and node.latest_version
                        and UnparsedVersion(v=node.version)
                        > UnparsedVersion(v=node.latest_version)
                    ):
                        yield node
                        yield unique_id
                elif selector == "old":
                    if (
                        real_node.version
                        and real_node.latest_version
                        and UnparsedVersion(v=real_node.version)
                        < UnparsedVersion(v=real_node.latest_version)
                        node.version
                        and node.latest_version
                        and UnparsedVersion(v=node.version)
                        < UnparsedVersion(v=node.latest_version)
                    ):
                        yield node
                        yield unique_id
                elif selector == "none":
                    if real_node.version is None:
                        yield node
                    if node.version is None:
                        yield unique_id
                else:
                    raise DbtRuntimeError(
                        f'Invalid version type selector {selector}: expected one of: "latest", "prerelease", "old", or "none"'
@@ -26,6 +26,11 @@ class FixtureParser(Parser[UnitTestFileFixture]):
        assert isinstance(file_block.file, FixtureSourceFile)
        unique_id = self.generate_unique_id(file_block.name)

        if file_block.file.path.relative_path.endswith(".sql"):
            rows = file_block.file.contents  # type: ignore
        else:  # endswith('.csv')
            rows = self.get_rows(file_block.file.contents)  # type: ignore

        fixture = UnitTestFileFixture(
            name=file_block.name,
            path=file_block.file.path.relative_path,
@@ -33,7 +38,7 @@ class FixtureParser(Parser[UnitTestFileFixture]):
            package_name=self.project.project_name,
            unique_id=unique_id,
            resource_type=NodeType.Fixture,
            rows=self.get_rows(file_block.file.contents),
            rows=rows,
        )
        self.manifest.add_fixture(file_block.file, fixture)
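
With this change a .sql fixture keeps its contents as a raw SQL string, while .csv contents are still parsed into row dictionaries. A rough equivalent of the CSV path using the same stdlib pieces the parser relies on (the helper name here is illustrative; exact behavior may differ):

import csv
from io import StringIO

def rows_from_csv(csv_string: str) -> list:
    # each CSV line becomes a {column: value} dict, as in the parsed fixture rows
    return list(csv.DictReader(StringIO(csv_string)))

# rows_from_csv("id,name\n1,a\n2,b") -> [{"id": "1", "name": "a"}, {"id": "2", "name": "b"}]
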
@@ -22,6 +22,7 @@ import time
from dbt.context.manifest import generate_query_header_context
from dbt.contracts.graph.semantic_manifest import SemanticManifest
from dbt_common.events.base_types import EventLevel
from dbt_common.exceptions.base import DbtValidationError
import dbt_common.utils
import json
import pprint
@@ -43,6 +44,7 @@ from dbt.constants import (
    MANIFEST_FILE_NAME,
    PARTIAL_PARSE_FILE_NAME,
    SEMANTIC_MANIFEST_FILE_NAME,
    SECRET_ENV_PREFIX,
)
from dbt_common.helper_types import PathSet
from dbt_common.events.functions import fire_event, get_invocation_id, warn_or_error
@@ -62,6 +64,8 @@ from dbt.events.types import (
    StateCheckVarsHash,
    DeprecatedModel,
    DeprecatedReference,
    SpacesInModelNameDeprecation,
    TotalModelNamesWithSpacesDeprecation,
    UpcomingReferenceDeprecation,
)
from dbt.logger import DbtProcessState
@@ -113,6 +117,7 @@ from dbt.exceptions import (
    TargetNotFoundError,
    AmbiguousAliasError,
    InvalidAccessTypeError,
    scrub_secrets,
)
from dbt.parser.base import Parser
from dbt.parser.analysis import AnalysisParser
@@ -520,6 +525,7 @@ class ManifestLoader:
            self.write_manifest_for_partial_parse()

        self.check_for_model_deprecations()
        self.check_for_spaces_in_model_names()

        return self.manifest

@@ -621,6 +627,47 @@ class ManifestLoader:
                )
            )

    def check_for_spaces_in_model_names(self):
        """Validates that model names do not contain spaces

        If `DEBUG` flag is `False`, logs only first bad model name
        If `DEBUG` flag is `True`, logs every bad model name
        If `ALLOW_SPACES_IN_MODEL_NAMES` is `False`, logs are `ERROR` level and an exception is raised if any names are bad
        If `ALLOW_SPACES_IN_MODEL_NAMES` is `True`, logs are `WARN` level
        """
        improper_model_names = 0
        level = (
            EventLevel.WARN
            if self.root_project.args.ALLOW_SPACES_IN_MODEL_NAMES
            else EventLevel.ERROR
        )

        for node in self.manifest.nodes.values():
            if isinstance(node, ModelNode) and " " in node.name:
                if improper_model_names == 0 or self.root_project.args.DEBUG:
                    fire_event(
                        SpacesInModelNameDeprecation(
                            model_name=node.name,
                            model_version=version_to_str(node.version),
                            level=level.value,
                        ),
                        level=level,
                    )
                improper_model_names += 1

        if improper_model_names > 0:
            fire_event(
                TotalModelNamesWithSpacesDeprecation(
                    count_invalid_names=improper_model_names,
                    show_debug_hint=(not self.root_project.args.DEBUG),
                    level=level.value,
                ),
                level=level,
            )

            if level == EventLevel.ERROR:
                raise DbtValidationError("Model names cannot contain spaces")
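
The two flags reduce to a small decision table; the sketch below condenses the branching documented in the docstring above (flag access simplified to plain booleans):

def spaces_in_names_behavior(allow_spaces: bool, debug: bool) -> dict:
    # condensed restatement of check_for_spaces_in_model_names' branching
    return {
        "event_level": "warn" if allow_spaces else "error",
        "log_every_bad_name": debug,   # otherwise only the first offender is logged
        "raises": not allow_spaces,    # DbtValidationError after the summary event
    }
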

    def load_and_parse_macros(self, project_parser_files):
        for project in self.all_projects.values():
            if project.project_name not in project_parser_files:
@@ -835,13 +882,6 @@ class ManifestLoader:
                )
                valid = False
                reparse_reason = ReparseReason.proj_env_vars_changed
            if (
                self.manifest.state_check.profile_env_vars_hash
                != manifest.state_check.profile_env_vars_hash
            ):
                fire_event(UnableToPartialParse(reason="env vars used in profiles.yml have changed"))
                valid = False
                reparse_reason = ReparseReason.prof_env_vars_changed

        missing_keys = {
            k
@@ -951,6 +991,9 @@ class ManifestLoader:
        # of env_vars, that would need to change.
        # We are using the parsed cli_vars instead of config.args.vars, in order
        # to sort them and avoid reparsing because of ordering issues.
        secret_vars = [
            v for k, v in config.cli_vars.items() if k.startswith(SECRET_ENV_PREFIX) and v.strip()
        ]
        stringified_cli_vars = pprint.pformat(config.cli_vars)
        vars_hash = FileHash.from_contents(
            "\x00".join(
@@ -965,7 +1008,7 @@ class ManifestLoader:
        fire_event(
            StateCheckVarsHash(
                checksum=vars_hash.checksum,
                vars=stringified_cli_vars,
                vars=scrub_secrets(stringified_cli_vars, secret_vars),
                profile=config.args.profile,
                target=config.args.target,
                version=__version__,
@@ -980,18 +1023,18 @@ class ManifestLoader:
            env_var_str += f"{key}:{config.project_env_vars[key]}|"
        project_env_vars_hash = FileHash.from_contents(env_var_str)

        # Create a FileHash of the env_vars in the project
        key_list = list(config.profile_env_vars.keys())
        key_list.sort()
        env_var_str = ""
        for key in key_list:
            env_var_str += f"{key}:{config.profile_env_vars[key]}|"
        profile_env_vars_hash = FileHash.from_contents(env_var_str)
        # Create a hash of the connection_info, which user has access to in
        # jinja context. Thus attributes here may affect the parsing result.
        # Ideally we should not expose all of the connection info to the jinja.

        # Create a FileHash of the profile file
        profile_path = os.path.join(get_flags().PROFILES_DIR, "profiles.yml")
        with open(profile_path) as fp:
            profile_hash = FileHash.from_contents(fp.read())
        # Renaming this variable mean that we will have to do a whole lot more
        # change to make sure the previous manifest can be loaded correctly.
        # This is an example of naming should be chosen based on the functionality
        # rather than the implementation details.
        connection_keys = list(config.credentials.connection_info())
        # avoid reparsing because of ordering issues
        connection_keys.sort()
        profile_hash = FileHash.from_contents(pprint.pformat(connection_keys))

        # Create a FileHashes for dbt_project for all dependencies
        project_hashes = {}
@@ -1003,7 +1046,6 @@ class ManifestLoader:
        # Create the ManifestStateCheck object
        state_check = ManifestStateCheck(
            project_env_vars_hash=project_env_vars_hash,
            profile_env_vars_hash=profile_env_vars_hash,
            vars_hash=vars_hash,
            profile_hash=profile_hash,
            project_hashes=project_hashes,
@@ -145,11 +145,11 @@ def get_source_files(project, paths, extension, parse_file_type, saved_files, ig
        if parse_file_type == ParseFileType.Seed:
            fb_list.append(load_seed_source_file(fp, project.project_name))
        # singular tests live in /tests but only generic tests live
        # in /tests/generic so we want to skip those
        # in /tests/generic and fixtures in /tests/fixture so we want to skip those
        else:
            if parse_file_type == ParseFileType.SingularTest:
                path = pathlib.Path(fp.relative_path)
                if path.parts[0] == "generic":
                if path.parts[0] in ["generic", "fixtures"]:
                    continue
            file = load_source_file(fp, parse_file_type, project.project_name, saved_files)
            # only append the list if it has contents. added to fix #3568
@@ -431,7 +431,7 @@ def get_file_types_for_project(project):
        },
        ParseFileType.Fixture: {
            "paths": project.fixture_paths,
            "extensions": [".csv"],
            "extensions": [".csv", ".sql"],
            "parser": "FixtureParser",
        },
    }
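
Singular-test discovery now skips both the generic and fixtures subdirectories under the test paths, keyed on the first path component. For example:

import pathlib

def is_skipped_singular_test(relative_path: str) -> bool:
    # mirrors the check above: relative_path is relative to the tests/ root
    return pathlib.Path(relative_path).parts[0] in ["generic", "fixtures"]

assert is_skipped_singular_test("fixtures/my_fixture.csv")
assert not is_skipped_singular_test("my_singular_test.sql")
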
@@ -202,7 +202,7 @@ class SourcePatcher:
            # runtime.
            fire_event(
                FreshnessConfigProblem(
                    msg=f"The configured adapter does not support metadata-based freshness. A loaded_at_field must be specified for source '{source.name}'."
                    msg=f"The configured adapter does not support metadata-based freshness. A loaded_at_field must be specified for source '{source.name}.{table.name}'."
                )
            )
@@ -68,6 +68,15 @@ class UnitTestManifestLoader:
        name = test_case.name
        if tested_node.is_versioned:
            name = name + f"_v{tested_node.version}"
        expected_sql: Optional[str] = None
        if test_case.expect.format == UnitTestFormat.SQL:
            expected_rows: List[Dict[str, Any]] = []
            expected_sql = test_case.expect.rows  # type: ignore
        else:
            assert isinstance(test_case.expect.rows, List)
            expected_rows = deepcopy(test_case.expect.rows)

        assert isinstance(expected_rows, List)
        unit_test_node = UnitTestNode(
            name=name,
            resource_type=NodeType.Unit,
@@ -76,8 +85,7 @@ class UnitTestManifestLoader:
            original_file_path=test_case.original_file_path,
            unique_id=test_case.unique_id,
            config=UnitTestNodeConfig(
                materialized="unit",
                expected_rows=deepcopy(test_case.expect.rows),  # type:ignore
                materialized="unit", expected_rows=expected_rows, expected_sql=expected_sql
            ),
            raw_code=tested_node.raw_code,
            database=tested_node.database,
@@ -120,17 +128,23 @@ class UnitTestManifestLoader:
            original_input_node = self._get_original_input_node(
                given.input, tested_node, test_case.name
            )
            input_name = original_input_node.name

            common_fields = {
                "resource_type": NodeType.Model,
                "original_file_path": original_input_node.original_file_path,
                # root directory for input and output fixtures
                "original_file_path": unit_test_node.original_file_path,
                "config": ModelConfig(materialized="ephemeral"),
                "database": original_input_node.database,
                "alias": original_input_node.identifier,
                "schema": original_input_node.schema,
                "fqn": original_input_node.fqn,
                "checksum": FileHash.empty(),
                "raw_code": self._build_fixture_raw_code(given.rows, None),
                "raw_code": self._build_fixture_raw_code(given.rows, None, given.format),
                "package_name": original_input_node.package_name,
                "unique_id": f"model.{original_input_node.package_name}.{input_name}",
                "name": input_name,
                "path": f"{input_name}.sql",
            }

            if original_input_node.resource_type in (
@@ -138,14 +152,7 @@ class UnitTestManifestLoader:
                NodeType.Seed,
                NodeType.Snapshot,
            ):
                input_name = original_input_node.name
                input_node = ModelNode(
                    **common_fields,
                    package_name=original_input_node.package_name,
                    unique_id=f"model.{original_input_node.package_name}.{input_name}",
                    name=input_name,
                    path=original_input_node.path or f"{input_name}.sql",
                )
                input_node = ModelNode(**common_fields)
            if (
                original_input_node.resource_type == NodeType.Model
                and original_input_node.version
@@ -156,13 +163,8 @@ class UnitTestManifestLoader:
                # We are reusing the database/schema/identifier from the original source,
                # but that shouldn't matter since this acts as an ephemeral model which just
                # wraps a CTE around the unit test node.
                input_name = original_input_node.name
                input_node = UnitTestSourceDefinition(
                    **common_fields,
                    package_name=original_input_node.package_name,
                    unique_id=f"model.{original_input_node.package_name}.{input_name}",
                    name=original_input_node.name,  # must be the same name for source lookup to work
                    path=input_name + ".sql",  # for writing out compiled_code
                    source_name=original_input_node.source_name,  # needed for source lookup
                )
            # Sources need to go in the sources dictionary in order to create the right lookup
@@ -178,12 +180,15 @@ class UnitTestManifestLoader:
            # Add unique ids of input_nodes to depends_on
            unit_test_node.depends_on.nodes.append(input_node.unique_id)

    def _build_fixture_raw_code(self, rows, column_name_to_data_types) -> str:
    def _build_fixture_raw_code(self, rows, column_name_to_data_types, fixture_format) -> str:
        # We're not currently using column_name_to_data_types, but leaving here for
        # possible future use.
        return ("{{{{ get_fixture_sql({rows}, {column_name_to_data_types}) }}}}").format(
            rows=rows, column_name_to_data_types=column_name_to_data_types
        )
        if fixture_format == UnitTestFormat.SQL:
            return rows
        else:
            return ("{{{{ get_fixture_sql({rows}, {column_name_to_data_types}) }}}}").format(
                rows=rows, column_name_to_data_types=column_name_to_data_types
            )

    def _get_original_input_node(self, input: str, tested_node: ModelNode, test_case_name: str):
        """
@@ -358,13 +363,29 @@ class UnitTestParser(YamlReader):
            )

            if ut_fixture.fixture:
                # find fixture file object and store unit_test_definition unique_id
                fixture = self._get_fixture(ut_fixture.fixture, self.project.project_name)
                fixture_source_file = self.manifest.files[fixture.file_id]
                fixture_source_file.unit_tests.append(unit_test_definition.unique_id)
                ut_fixture.rows = fixture.rows
                ut_fixture.rows = self.get_fixture_file_rows(
                    ut_fixture.fixture, self.project.project_name, unit_test_definition.unique_id
                )
            else:
                ut_fixture.rows = self._convert_csv_to_list_of_dicts(ut_fixture.rows)
        elif ut_fixture.format == UnitTestFormat.SQL:
            if not (isinstance(ut_fixture.rows, str) or isinstance(ut_fixture.fixture, str)):
                raise ParsingError(
                    f"Unit test {unit_test_definition.name} has {fixture_type} rows or fixtures "
                    f"which do not match format {ut_fixture.format}. Expected string."
                )

            if ut_fixture.fixture:
                ut_fixture.rows = self.get_fixture_file_rows(
                    ut_fixture.fixture, self.project.project_name, unit_test_definition.unique_id
                )

    def get_fixture_file_rows(self, fixture_name, project_name, utdef_unique_id):
        # find fixture file object and store unit_test_definition unique_id
        fixture = self._get_fixture(fixture_name, project_name)
        fixture_source_file = self.manifest.files[fixture.file_id]
        fixture_source_file.unit_tests.append(utdef_unique_id)
        return fixture.rows

    def _convert_csv_to_list_of_dicts(self, csv_string: str) -> List[Dict[str, Any]]:
        dummy_file = StringIO(csv_string)
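
End to end, fixture raw code now has two shapes: SQL-format fixtures pass through verbatim, while everything else is wrapped in a Jinja call to get_fixture_sql for rendering at compile time. A condensed restatement of that branch (the UnitTestFormat import path is assumed):

from dbt.artifacts.resources.types import UnitTestFormat  # import path assumed

def build_fixture_raw_code(rows, column_name_to_data_types, fixture_format) -> str:
    if fixture_format == UnitTestFormat.SQL:
        return rows  # a raw SQL fixture is used as-is
    # otherwise emit a Jinja call that renders the rows at compile time
    return "{{{{ get_fixture_sql({rows}, {column_name_to_data_types}) }}}}".format(
        rows=rows, column_name_to_data_types=column_name_to_data_types
    )
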
Some files were not shown because too many files have changed in this diff