forked from repo-mirrors/dbt-core

Compare commits: jerco/sql-...jerco/test (123 commits)
| SHA1 |
|---|
| f7705a3e4b |
| 92847ce90f |
| e48f7ab32e |
| 16dc2be556 |
| eea872c319 |
| 189c06dbb1 |
| 74662d1527 |
| 75f3e8cb74 |
| aeee1c23a6 |
| e50678c914 |
| ae62f5708c |
| cda88d1948 |
| e7218d3e99 |
| 2c42fb436c |
| a9e1a0e00a |
| 0d8e061a3d |
| 7532420eef |
| 03b17ff401 |
| fc1fc2d5e9 |
| 7e43f36bb1 |
| 72c17c4464 |
| 3996a69861 |
| aa8115aa5e |
| ab0c3510eb |
| 4480d05cfb |
| 788694ec5b |
| fb5bb7fff3 |
| c270a77552 |
| a2e040f389 |
| a4376b96d8 |
| ed5df342ca |
| 96f063e077 |
| ee8f81de6a |
| 935edc70aa |
| 28c44a9be7 |
| a2b3602485 |
| 3733817488 |
| c5fb6c275a |
| f633e9936f |
| 4e57c51c7a |
| 6267572ba7 |
| 32e1924c3b |
| 55af3c78d7 |
| bdff19d909 |
| f87c7819fb |
| 33694f3772 |
| ebfc18408b |
| 6958f4f12e |
| 1f898c859a |
| ce0bcc08a6 |
| d1ae9dd37f |
| 31a3f2bdee |
| 1390715590 |
| d09459c980 |
| 979e1c74bf |
| 7d0fccd63f |
| 37b8b65aad |
| 0211668361 |
| f8c8322bb4 |
| 14c2bd9959 |
| 8db6bac1db |
| 080dd41876 |
| 8e9702cec5 |
| 5ff81c244e |
| cfe81e81fd |
| 365414b5fc |
| ec46be7368 |
| f23a403468 |
| 15ad34e415 |
| bacc891703 |
| a2e167761c |
| cce8fda06c |
| dd4ac1ba4a |
| 401ebc2768 |
| 83612a98b7 |
| 827eae2750 |
| 3a3bedcd8e |
| c1dfb4e6e6 |
| 5852f17f0b |
| a94156703d |
| 2b3fb7a5d0 |
| 5f2a43864f |
| 88fbc94db2 |
| 6c277b5fe1 |
| 40e64b238c |
| 581bf51574 |
| 899b0ef224 |
| 3ade206e86 |
| 58bd750007 |
| 0ec829a096 |
| 7f953a6d48 |
| 0b92f04683 |
| 3f37a43a8c |
| 204d53516a |
| 5071b00baa |
| 81118d904a |
| 69cdc4148e |
| 1c71bf414d |
| 7cf57ae72d |
| 1b6f95fef4 |
| 38940eeeea |
| 6c950bad7c |
| 5e681929ae |
| ea5a9da71e |
| 9c5ee59e19 |
| 55b1d3a191 |
| a968aa7725 |
| 5e0a765917 |
| 0aeb9976f4 |
| 30a7da8112 |
| f6a9dae422 |
| 62a7163334 |
| e2f0467f5d |
| 3e3ecb1c3f |
| 27511d807f |
| 15077d087c |
| 5b01cc0c22 |
| d1bcff865d |
| 0fce63665c |
| 1183e85eb4 |
| 3b86243f04 |
| c251dae75e |
| ecfd77f1ca |
.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.0.1
+current_version = 1.2.0a1
 parse = (?P<major>\d+)
 	\.(?P<minor>\d+)
 	\.(?P<patch>\d+)
@@ -24,8 +24,6 @@ values =
 [bumpversion:part:pre]
 first_value = 1
 
-[bumpversion:file:setup.py]
-
 [bumpversion:file:core/setup.py]
 
 [bumpversion:file:core/dbt/version.py]
@@ -37,3 +35,7 @@ first_value = 1
 [bumpversion:file:plugins/postgres/dbt/adapters/postgres/__version__.py]
 
 [bumpversion:file:docker/Dockerfile]
+
+[bumpversion:file:tests/adapter/setup.py]
+
+[bumpversion:file:tests/adapter/dbt/tests/adapter/__version__.py]
@@ -2,6 +2,7 @@
 For information on prior major and minor releases, see their changelogs:
 
+* [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
 * [1.0](https://github.com/dbt-labs/dbt-core/blob/1.0.latest/CHANGELOG.md)
 * [0.21](https://github.com/dbt-labs/dbt-core/blob/0.21.latest/CHANGELOG.md)
 * [0.20](https://github.com/dbt-labs/dbt-core/blob/0.20.latest/CHANGELOG.md)
 
@@ -1,31 +0,0 @@
-## dbt-core 1.1.0 (TBD)
-
-### Features
-- Added Support for Semantic Versioning ([#4644](https://github.com/dbt-labs/dbt-core/pull/4644))
-- New Dockerfile to support specific db adapters and platforms. See docker/README.md for details ([#4495](https://github.com/dbt-labs/dbt-core/issues/4495), [#4487](https://github.com/dbt-labs/dbt-core/pull/4487))
-- Allow unique_key to take a list ([#2479](https://github.com/dbt-labs/dbt-core/issues/2479), [#4618](https://github.com/dbt-labs/dbt-core/pull/4618))
-- Add `--quiet` global flag and `print` Jinja function ([#3451](https://github.com/dbt-labs/dbt-core/issues/3451), [#4701](https://github.com/dbt-labs/dbt-core/pull/4701))
-
-### Fixes
-- User wasn't asked for permission to overwrite a profile entry when running init inside an existing project ([#4375](https://github.com/dbt-labs/dbt-core/issues/4375), [#4447](https://github.com/dbt-labs/dbt-core/pull/4447))
-- Add project name validation to `dbt init` ([#4490](https://github.com/dbt-labs/dbt-core/issues/4490), [#4536](https://github.com/dbt-labs/dbt-core/pull/4536))
-- Allow override of string and numeric types for adapters. ([#4603](https://github.com/dbt-labs/dbt-core/issues/4603))
-- A change in secret environment variables won't trigger a full reparse ([#4650](https://github.com/dbt-labs/dbt-core/issues/4650), [#4665](https://github.com/dbt-labs/dbt-core/pull/4665))
-- Fix misspellings and typos in docstrings ([#4545](https://github.com/dbt-labs/dbt-core/pull/4545))
-
-### Under the hood
-- Testing cleanup ([#4496](https://github.com/dbt-labs/dbt-core/pull/4496), [#4509](https://github.com/dbt-labs/dbt-core/pull/4509))
-- Clean up test deprecation warnings ([#3988](https://github.com/dbt-labs/dbt-core/issue/3988), [#4556](https://github.com/dbt-labs/dbt-core/pull/4556))
-- Use mashumaro for serialization in event logging ([#4504](https://github.com/dbt-labs/dbt-core/issues/4504), [#4505](https://github.com/dbt-labs/dbt-core/pull/4505))
-- Drop support for Python 3.7.0 + 3.7.1 ([#4584](https://github.com/dbt-labs/dbt-core/issues/4584), [#4585](https://github.com/dbt-labs/dbt-core/pull/4585), [#4643](https://github.com/dbt-labs/dbt-core/pull/4643))
-- Re-format codebase (except tests) using pre-commit hooks ([#3195](https://github.com/dbt-labs/dbt-core/issues/3195), [#4697](https://github.com/dbt-labs/dbt-core/pull/4697))
-- Add deps module README ([#4686](https://github.com/dbt-labs/dbt-core/pull/4686/))
-- Initial conversion of tests to pytest ([#4690](https://github.com/dbt-labs/dbt-core/issues/4690), [#4691](https://github.com/dbt-labs/dbt-core/pull/4691))
-- Fix errors in Windows for tests/functions ([#4781](https://github.com/dbt-labs/dbt-core/issues/4781), [#4767](https://github.com/dbt-labs/dbt-core/pull/4767))
-
-Contributors:
-- [@NiallRees](https://github.com/NiallRees) ([#4447](https://github.com/dbt-labs/dbt-core/pull/4447))
-- [@alswang18](https://github.com/alswang18) ([#4644](https://github.com/dbt-labs/dbt-core/pull/4644))
-- [@ehmartens](https://github.com/ehmartens) ([#4701](https://github.com/dbt-labs/dbt-core/pull/4701))
-- [@mdesmet](https://github.com/mdesmet) ([#4604](https://github.com/dbt-labs/dbt-core/pull/4604))
-- [@kazanzhy](https://github.com/kazanzhy) ([#4545](https://github.com/dbt-labs/dbt-core/pull/4545))
@@ -26,6 +26,12 @@ changie batch <version> --move-dir '<version>' --prerelease 'rc1'
 changie merge
 ```
+
+Example
+```
+changie batch 1.0.5 --move-dir '1.0.5' --prerelease 'rc1'
+changie merge
+```
 
 #### Final Release Workflow
 These commands batch up changes in `/.changes/unreleased` as well as `/.changes/<version>` to be included in this final release and delete all prereleases. This rolls all prereleases up into a single final release. All `yaml` files in `/unreleased` and `<version>` will be deleted at this point.
 
@@ -34,7 +40,14 @@ changie batch <version> --include '<version>' --remove-prereleases
 changie merge
 ```
+
+Example
+```
+changie batch 1.0.5 --include '1.0.5' --remove-prereleases
+changie merge
+```
 
 ### A Note on Manual Edits & Gotchas
 - Changie generates markdown files in the `.changes` directory that are parsed together with the `changie merge` command. Every time `changie merge` is run, it regenerates the entire file. For this reason, any changes made directly to `CHANGELOG.md` will be overwritten on the next run of `changie merge`.
 - If changes need to be made to the `CHANGELOG.md`, make the changes to the relevant `<version>.md` file located in the `/.changes` directory. You will then run `changie merge` to regenerate the `CHANGELOG.md`.
 - Do not run `changie batch` again on released versions. Our final release workflow deletes all of the yaml files associated with individual changes. If for some reason modifications to the `CHANGELOG.md` are required after we've generated the final release `CHANGELOG.md`, the modifications need to be done manually to the `<version>.md` file in the `/.changes` directory.
 - Changie can modify, create, and delete files depending on the command you run. This is expected. Be sure to commit everything that has been modified and deleted.
@@ -3,4 +3,4 @@
 - This file provides a full account of all changes to `dbt-core` and `dbt-postgres`
 - Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases.
 - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
-- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](CONTRIBUTING.md)
+- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)
.changes/unreleased/Dependencies-20220427-195128.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Dependencies
+body: "Bump ubuntu from 20.04 to 22.04"
+time: 2022-04-27T19:51:28.000000-05:00
+custom:
+  Author: dependabot[bot]
+  Issue: "4904"
+  PR: "5141"

.changes/unreleased/Dependencies-20220506-160907.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Dependencies
+body: "Bumping hologram version"
+time: 2022-05-06T16:09:07.000000-05:00
+custom:
+  Author: leahwicz
+  Issue: "5219"
+  PR: "5218"

.changes/unreleased/Features-20220408-112610.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Features
+body: Add selector method when reading selector definitions
+time: 2022-04-08T11:26:10.713088+10:00
+custom:
+  Author: danieldiamond
+  Issue: "4821"
+  PR: "4827"

.changes/unreleased/Features-20220423-231756.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Features
+body: Add set and zip function to contexts
+time: 2022-04-23T23:17:56.851793+12:00
+custom:
+  Author: jeremyyeo
+  Issue: "2345"
+  PR: "5107"

.changes/unreleased/Features-20220424-132655.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Features
+body: Adds itertools to modules Jinja namespace
+time: 2022-04-24T13:26:55.008246+01:00
+custom:
+  Author: bd3dowling
+  Issue: "5130"
+  PR: "5140"

.changes/unreleased/Features-20220428-065644.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Features
+body: allow target as an option in profile_template.yml
+time: 2022-04-28T06:56:44.511519-04:00
+custom:
+  Author: alexrosenfeld10
+  Issue: "5179"
+  PR: "5184"

.changes/unreleased/Features-20220503-142934.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Features
+body: 'seed: Add new macro get_csv_sql'
+time: 2022-05-03T14:29:34.847959075Z
+custom:
+  Author: adamantike
+  Issue: "5206"
+  PR: "5207"

.changes/unreleased/Features-20220510-204949.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Features
+body: Grants as Node Configs
+time: 2022-05-10T20:49:49.197999-04:00
+custom:
+  Author: gshank
+  Issue: "5189"
+  PR: "5230"

.changes/unreleased/Features-20220512-215748.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Features
+body: Adds file selectors and support for file selectors in the default method selector
+time: 2022-05-12T21:57:48.289674-07:00
+custom:
+  Author: jwills
+  Issue: "5240"
+  PR: "5241"

.changes/unreleased/Fixes-20220317-210916.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Fixes
+body: Adding new cols to check_cols in snapshots
+time: 2022-03-17T21:09:16.977086+01:00
+custom:
+  Author: GtheSheep
+  Issue: "3146"
+  PR: "4893"
.changes/unreleased/Fixes-20220322-173753.yaml (new file)
@@ -0,0 +1,8 @@
+kind: Fixes
+body: Truncate relation names when appending a suffix that will result in len > 63
+  characters using make_temp_relation and make_backup_relation macros
+time: 2022-03-22T17:37:53.320082-07:00
+custom:
+  Author: epapineau
+  Issue: "2869"
+  PR: "4921"

.changes/unreleased/Fixes-20220415-112927.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Fixes
+body: Restore ability to utilize `updated_at` for check_cols snapshots
+time: 2022-04-15T11:29:27.063462-06:00
+custom:
+  Author: dbeatty10
+  Issue: "5076"
+  PR: "5077"

.changes/unreleased/Fixes-20220422-131227.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Fixes
+body: Fix retry logic to return values after initial try
+time: 2022-04-22T13:12:27.239055-05:00
+custom:
+  Author: emmyoop
+  Issue: "5023"
+  PR: "5137"

.changes/unreleased/Fixes-20220422-135645.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Fixes
+body: Use yaml renderer (with target context) for rendering selectors
+time: 2022-04-22T13:56:45.147893-04:00
+custom:
+  Author: gshank
+  Issue: "5131"
+  PR: "5136"

.changes/unreleased/Fixes-20220425-203924.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Fixes
+body: Scrub secret env vars from CommandError in exception stacktrace
+time: 2022-04-25T20:39:24.365495+02:00
+custom:
+  Author: jtcohen6
+  Issue: "5151"
+  PR: "5152"

.changes/unreleased/Fixes-20220426-202104.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Fixes
+body: Ensure the metric name does not contain spaces
+time: 2022-04-26T20:21:04.360693-04:00
+custom:
+  Author: gshank
+  Issue: "4572"
+  PR: "5173"

.changes/unreleased/Fixes-20220427-102648.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Fixes
+body: When parsing 'all_sources' should be a list of unique dirs
+time: 2022-04-27T10:26:48.648388-04:00
+custom:
+  Author: gshank
+  Issue: "5120"
+  PR: "5176"

.changes/unreleased/Fixes-20220428-100157.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Fixes
+body: Add warning if yaml contains duplicate keys
+time: 2022-04-28T10:01:57.893956+12:00
+custom:
+  Author: jeremyyeo
+  Issue: "5114"
+  PR: "5146"

.changes/unreleased/Fixes-20220429-160742.yaml (new file)
@@ -0,0 +1,8 @@
+kind: Fixes
+body: Modifying the drop_test_schema to work better with Redshift issues around locked
+  tables and current transactions
+time: 2022-04-29T16:07:42.750046-05:00
+custom:
+  Author: Mcknight-42
+  Issue: "5200"
+  PR: "5198"

.changes/unreleased/Fixes-20220509-130021.yaml (new file)
@@ -0,0 +1,8 @@
+kind: Fixes
+body: Fix column comparison in snapshot_check_all_get_existing_columns for check-strategy
+  snapshots with explicit check_cols defined
+time: 2022-05-09T13:00:21.649028+02:00
+custom:
+  Author: jtcohen6
+  Issue: "5222"
+  PR: "5223"

.changes/unreleased/Fixes-20220509-131312.yaml (new file)
@@ -0,0 +1,8 @@
+kind: Fixes
+body: Changed how `--select state:modified` detects changes for macros nodes depend
+  on
+time: 2022-05-09T13:13:12.889074-05:00
+custom:
+  Author: stu-k
+  Issue: "5202"
+  PR: "5224"

.changes/unreleased/Fixes-20220511-123238.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Fixes
+body: Fix column comparison in snapshot_check_all_get_existing_columns to use adapter.get_columns_in_relation
+time: 2022-05-11T12:32:38.313321+02:00
+custom:
+  Author: jtcohen6
+  Issue: "5222"
+  PR: "5232"

.changes/unreleased/Fixes-20220518-134610.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Fixes
+body: Remove docs file from manifest when removing doc node
+time: 2022-05-18T13:46:10.167143-04:00
+custom:
+  Author: gshank
+  Issue: "4146"
+  PR: "5270"

.changes/unreleased/Fixes-20220601-135908.yaml (new file)
@@ -0,0 +1,8 @@
+kind: Fixes
+body: Change node ancestor/descendant algo, fixes issue where downstream models aren't
+  run when using networkx >= 2.8.1
+time: 2022-06-01T13:59:08.886215-05:00
+custom:
+  Author: iknox-fa
+  Issue: "5286"
+  PR: "5326"

.changes/unreleased/Fixes-20220601-194234.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Fixes
+body: Fixing Windows color regression
+time: 2022-06-01T19:42:34.263009-04:00
+custom:
+  Author: leahwicz
+  Issue: "5191"
+  PR: "5327"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Automate changelog generation with changie
-time: 2022-02-18T16:13:19.882436-06:00
-custom:
-  Author: emmyoop
-  Issue: "4652"
-  PR: "4743"
.changes/unreleased/Under the Hood-20220409-040539.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Migrating 005_simple_seed to the new test framework.
+time: 2022-04-09T04:05:39.20045-07:00
+custom:
+  Author: versusfacit
+  Issue: "200"
+  PR: "5013"

.changes/unreleased/Under the Hood-20220413-183014.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Convert 029_docs_generate tests to new framework
+time: 2022-04-13T18:30:14.706391-04:00
+custom:
+  Author: gshank
+  Issue: "5035"
+  PR: "5058"

.changes/unreleased/Under the Hood-20220414-132206.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Move package deprecation check outside of package cache
+time: 2022-04-14T13:22:06.157579-05:00
+custom:
+  Author: emmyoop
+  Issue: "5068"
+  PR: "5069"

.changes/unreleased/Under the Hood-20220427-112127.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Mypy -> 0.942 + fixed import logic to allow for full mypy coverage
+time: 2022-04-27T11:21:27.499359-05:00
+custom:
+  Author: iknox-fa
+  Issue: "4805"
+  PR: "5171"

.changes/unreleased/Under the Hood-20220427-140628.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Converted dbt list tests to pytest
+time: 2022-04-27T14:06:28.882908-05:00
+custom:
+  Author: stu-k
+  Issue: "5049"
+  PR: "5178"

.changes/unreleased/Under the Hood-20220503-195212.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: 'Fix: Call str and repr for UnsetProfileConfig without a RuntimeException'
+time: 2022-05-03T19:52:12.793729384+02:00
+custom:
+  Author: tomasfarias
+  Issue: "5081"
+  PR: "5209"

.changes/unreleased/Under the Hood-20220504-010031.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Improve tracking error logging message
+time: 2022-05-04T01:00:31.60387036+02:00
+custom:
+  Author: NicolasPA
+  Issue: "5197"
+  PR: "5211"

.changes/unreleased/Under the Hood-20220509-092628.yaml (new file)
@@ -0,0 +1,8 @@
+kind: Under the Hood
+body: 'Clean up materialization logic: more consistent relation names, loading from
+  cache'
+time: 2022-05-09T09:26:28.551068+02:00
+custom:
+  Author: jtcohen6
+  Issue: "2869"
+  PR: "4921"

.changes/unreleased/Under the Hood-20220518-095144.yaml (new file)
@@ -0,0 +1,8 @@
+kind: Under the Hood
+body: Use the default Python version for local dev and test instead of requiring Python
+  3.8
+time: 2022-05-18T09:51:44.603193-07:00
+custom:
+  Author: jwills
+  Issue: "5257"
+  PR: "5269"

.changes/unreleased/Under the Hood-20220518-145522.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Fix test for context set function
+time: 2022-05-18T14:55:22.554316-04:00
+custom:
+  Author: gshank
+  Issue: "5266"
+  PR: "5272"

.changes/unreleased/Under the Hood-20220601-105245.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Fix pip upgrade step in CI for Windows
+time: 2022-06-01T10:52:45.872931-04:00
+custom:
+  Author: gshank
+  Issue: "5321"
+  PR: "5320"

.changes/unreleased/Under the Hood-20220601-112648.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Fix unit test test_graph_selection
+time: 2022-06-01T11:26:48.725831-04:00
+custom:
+  Author: gshank
+  Issue: "5323"
+  PR: "5324"

.changes/unreleased/Under the Hood-20220606-230353.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Update context readme + clean up context code
+time: 2022-06-06T23:03:53.022568+02:00
+custom:
+  Author: jtcohen6
+  Issue: "4796"
+  PR: "5334"
.changie.yaml
@@ -8,15 +8,15 @@ versionFormat: '## dbt-core {{.Version}} - {{.Time.Format "January 02, 2006"}}'
 kindFormat: '### {{.Kind}}'
 changeFormat: '- {{.Body}} ([#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-core/issues/{{.Custom.Issue}}), [#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-core/pull/{{.Custom.PR}}))'
 kinds:
-- label: Fixes
-- label: Features
-- label: Under the Hood
+- label: Breaking Changes
+- label: Features
+- label: Fixes
+- label: Docs
+- label: Under the Hood
+- label: Dependencies
 custom:
 - key: Author
-  label: GitHub Name
+  label: GitHub Username(s) (separated by a single space if multiple)
   type: string
   minLength: 3
 - key: Issue
@@ -28,23 +28,33 @@ custom:
   type: int
   minLength: 4
 footerFormat: |
   Contributors:
   {{- $contributorDict := dict }}
-  {{- $core_team := list "emmyoop" "nathaniel-may" "gshank" "leahwicz" "ChenyuLInx" "stu-k" "iknox-fa" "VersusFacit" "McKnight-42" "jtcohen6" }}
+  {{- /* any names added to this list should be all lowercase for later matching purposes */}}
+  {{- $core_team := list "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "dependabot" }}
   {{- range $change := .Changes }}
-  {{- $author := $change.Custom.Author }}
-  {{- if not (has $author $core_team)}}
-  {{- $pr := $change.Custom.PR }}
-  {{- if hasKey $contributorDict $author }}
-  {{- $prList := get $contributorDict $author }}
-  {{- $prList = append $prList $pr }}
-  {{- $contributorDict := set $contributorDict $author $prList }}
-  {{- else }}
-  {{- $prList := list $change.Custom.PR }}
-  {{- $contributorDict := set $contributorDict $author $prList }}
-  {{- end }}
+  {{- $authorList := splitList " " $change.Custom.Author }}
+  {{- /* loop through all authors for a PR */}}
+  {{- range $author := $authorList }}
+  {{- $authorLower := lower $author }}
+  {{- /* we only want to include non-core team contributors */}}
+  {{- if not (has $authorLower $core_team)}}
+  {{- $pr := $change.Custom.PR }}
+  {{- /* check if this contributor has other PRs associated with them already */}}
+  {{- if hasKey $contributorDict $author }}
+  {{- $prList := get $contributorDict $author }}
+  {{- $prList = append $prList $pr }}
+  {{- $contributorDict := set $contributorDict $author $prList }}
+  {{- else }}
+  {{- $prList := list $change.Custom.PR }}
+  {{- $contributorDict := set $contributorDict $author $prList }}
+  {{- end }}
+  {{- end}}
+  {{- end}}
   {{- end }}
   {{- /* no indentation here for formatting so the final markdown doesn't have unneeded indentations */}}
   {{- if $contributorDict}}
   ### Contributors
   {{- range $k,$v := $contributorDict }}
-  - [{{$k}}](https://github.com/{{$k}}) ({{ range $index, $element := $v }}{{if $index}}, {{end}}[#{{$element}}](https://github.com/dbt-labs/dbt-core/pull/{{$element}}){{end}})
+  - [@{{$k}}](https://github.com/{{$k}}) ({{ range $index, $element := $v }}{{if $index}}, {{end}}[#{{$element}}](https://github.com/dbt-labs/dbt-core/pull/{{$element}}){{end}})
   {{- end }}
   {{- end }}
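The rewritten `footerFormat` above splits `Custom.Author` on spaces, so a single change entry can now credit several contributors. A minimal sketch of an entry that exercises that path, with hypothetical usernames and issue/PR numbers (not part of this diff):

```
# hypothetical changie entry; illustrative values only
kind: Features
body: An example change contributed by two community members
time: 2022-06-01T12:00:00.000000-05:00
custom:
  # two GitHub usernames separated by a single space, per the new Author label
  Author: someuser otheruser
  Issue: "0000"
  PR: "0000"
```

Each username would be split out, lowercased for the core-team check, and rendered as its own `- [@user](...)` line under `### Contributors`.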
.git-blame-ignore-revs (new file)
@@ -0,0 +1,2 @@
+# Reformatting dbt-core via black, flake8, mypy, and assorted pre-commit hooks.
+43e3fc22c4eae4d3d901faba05e33c40f1f1dc5a
.github/ISSUE_TEMPLATE/feature-request.yml (6 lines changed)
@@ -6,7 +6,7 @@ body:
   - type: markdown
     attributes:
       value: |
-        Thanks for taking the time to fill out this feature requests!
+        Thanks for taking the time to fill out this feature request!
   - type: checkboxes
     attributes:
       label: Is there an existing feature request for this?
@@ -14,6 +14,10 @@ body:
       options:
       - label: I have searched the existing issues
        required: true
+      label: Is this your first time opening an issue?
+      options:
+      - label: I have read the [expectations for open source contributors](https://docs.getdbt.com/docs/contributing/oss-expectations)
+        required: true
   - type: textarea
     attributes:
       label: Describe the Feature
.github/pull_request_template.md (4 lines changed)
@@ -15,7 +15,9 @@ resolves #
 
 ### Checklist
 
+- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me
 - [ ] I have signed the [CLA](https://docs.getdbt.com/docs/contributor-license-agreements)
 - [ ] I have run this code in development and it appears to resolve the stated issue
 - [ ] This PR includes tests, or tests are not required/relevant for this PR
-- [ ] I have added information about my change to be included in the [CHANGELOG](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#Adding-CHANGELOG-Entry).
 - [ ] I have [opened an issue to add/update docs](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose), or docs changes are not required/relevant for this PR
+- [ ] I have run `changie new` to [create a changelog entry](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#Adding-CHANGELOG-Entry)
.github/workflows/backport.yml (18 lines changed)
@@ -13,22 +13,28 @@
 # This automates the backporting process
 
 # **when?**
-# Once a PR is "Squash and merge"'d and it has been correctly labeled
-# according to the naming convention.
+# Once a PR is "Squash and merge"'d, by adding a backport label, this is triggered
 
 name: Backport
 on:
   pull_request:
     types:
       - closed
+      - labeled
 
+permissions:
+  contents: write
+  pull-requests: write
+
 jobs:
   backport:
-    runs-on: ubuntu-18.04
     name: Backport
+    runs-on: ubuntu-latest
+    # Only react to merged PRs for security reasons.
+    # See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
+    if: >
+      github.event.pull_request.merged
+      && contains(github.event.label.name, 'backport')
     steps:
-      - name: Backport
-        uses: tibdex/backport@v1.1.1
+      - uses: tibdex/backport@v2.0.2
         with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/changelog-check.yml (40 lines changed)
@@ -11,12 +11,16 @@
 # Ensure code change gets reflected in the CHANGELOG.
 
 # **when?**
-# This will run for all PRs going into main and *.latest.
+# This will run for all PRs going into main and *.latest. It will
+# run when they are opened, reopened, when any label is added or removed
+# and when new code is pushed to the branch. The action will then get
+# skipped if the 'Skip Changelog' label is present in any of the labels.
 
 name: Check Changelog Entry
 
 on:
   pull_request:
     types: [opened, reopened, labeled, unlabeled, synchronize]
   workflow_dispatch:
 
 defaults:
@@ -27,9 +31,13 @@ permissions:
   contents: read
   pull-requests: write
 
+env:
+  changelog_comment: 'Thank you for your pull request! We could not find a changelog entry for this change. For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry).'
+
 jobs:
   changelog:
+    name: changelog
     if: "!contains(github.event.pull_request.labels.*.name, 'Skip Changelog')"
 
     runs-on: ubuntu-latest
 
@@ -48,15 +56,23 @@ jobs:
          filters: |
            changelog:
            - added: '.changes/unreleased/**.yaml'
-      - name: Check a file has been added to .changes/unreleased if required
-        uses: actions/github-script@v6
-        if: steps.filter.outputs.changelog == 'false' && !contains( github.event.pull_request.labels.*.name, 'Skip Changelog')
+      - name: Check if comment already exists
+        uses: peter-evans/find-comment@v1
+        id: changelog_comment
         with:
-          script: |
-            github.rest.issues.createComment({
-              issue_number: context.issue.number,
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              body: "Thank you for your pull request! We could not find a changelog entry for this change. For details on how to document a change, see [the contributing guide](CONTRIBUTING.md)."
-            })
-            core.setFailed('Changelog entry required to merge.')
+          issue-number: ${{ github.event.pull_request.number }}
+          comment-author: 'github-actions[bot]'
+          body-includes: ${{ env.changelog_comment }}
+      - name: Create PR comment if changelog entry is missing, required, and does not exist
+        if: |
+          steps.filter.outputs.changelog == 'false' &&
+          steps.changelog_comment.outputs.comment-body == ''
+        uses: peter-evans/create-or-update-comment@v1
+        with:
+          issue-number: ${{ github.event.pull_request.number }}
+          body: ${{ env.changelog_comment }}
+      - name: Fail job if changelog entry is missing and required
+        if: steps.filter.outputs.changelog == 'false'
+        uses: actions/github-script@v6
+        with:
+          script: core.setFailed('Changelog entry required to merge.')
.github/workflows/dependency-changelog.yml (new file, 114 lines)
@@ -0,0 +1,114 @@
+# **what?**
+# When dependabot creates a PR, it always adds the `dependencies` label. This
+# action will add a corresponding changie yaml file to that PR when that label is added.
+# The file is created off a template:
+#
+# kind: Dependencies
+# body: <PR title>
+# time: <current timestamp>
+# custom:
+#   Author: dependabot
+#   Issue: 4904
+#   PR: <PR number>
+#
+# **why?**
+# Automate changelog generation for more visibility with automated dependency updates via dependabot.
+
+# **when?**
+# Once a PR is created and it has been correctly labeled with `dependencies`. The intended use
+# is for the PRs created by dependabot. You can also manually trigger this by adding the
+# `dependencies` label at any time.
+
+name: Dependency Changelog
+
+on:
+  pull_request:
+    # catch when the PR is opened with the label or when the label is added
+    types: [opened, labeled]
+
+permissions:
+  contents: write
+  pull-requests: read
+
+jobs:
+  dependency_changelog:
+    if: "contains(github.event.pull_request.labels.*.name, 'dependencies')"
+    runs-on: ubuntu-latest
+
+    steps:
+      # timestamp changes the order the changelog entries are listed in the final Changelog.md file. Precision is not
+      # important here.
+      # The timestamp on the filename and the timestamp in the contents of the file have different expected formats.
+      - name: Get File Name Timestamp
+        id: filename_time
+        uses: nanzm/get-time-action@v1.1
+        with:
+          format: 'YYYYMMDD-HHmmss'
+
+      - name: Get File Content Timestamp
+        id: file_content_time
+        uses: nanzm/get-time-action@v1.1
+        with:
+          format: 'YYYY-MM-DDTHH:mm:ss.000000-05:00'
+
+      # changie expects files to be named in a specific pattern.
+      - name: Generate Filepath
+        id: fp
+        run: |
+          FILEPATH=.changes/unreleased/Dependencies-${{ steps.filename_time.outputs.time }}.yaml
+          echo "::set-output name=FILEPATH::$FILEPATH"
+
+      - name: Check if changelog file exists already
+        # if there's already a changelog entry, don't add another one!
+        # https://github.com/marketplace/actions/paths-changes-filter
+        # For each filter, it sets output variable named by the filter to the text:
+        #   'true' - if any of changed files matches any of filter rules
+        #   'false' - if none of changed files matches any of filter rules
+        # also, returns:
+        #   `changes` - JSON array with names of all filters matching any of the changed files
+        uses: dorny/paths-filter@v2
+        id: changelog_check
+        with:
+          token: ${{ secrets.GITHUB_TOKEN }}
+          filters: |
+            exists:
+            - added: '.changes/unreleased/**.yaml'
+
+      - name: Checkout Branch
+        if: steps.changelog_check.outputs.exists == 'false'
+        uses: actions/checkout@v2
+        with:
+          # specifying the ref avoids checking out the repository in a detached state
+          ref: ${{ github.event.pull_request.head.ref }}
+          # If this is not set to false, Git push is performed with github.token and not the token
+          # configured using the env: GITHUB_TOKEN in commit step
+          persist-credentials: false
+
+      - name: Create file from template
+        if: steps.changelog_check.outputs.exists == 'false'
+        run: |
+          echo kind: Dependencies > "${{ steps.fp.outputs.FILEPATH }}"
+          echo 'body: "${{ github.event.pull_request.title }}"' >> "${{ steps.fp.outputs.FILEPATH }}"
+          echo time: "${{ steps.file_content_time.outputs.time }}" >> "${{ steps.fp.outputs.FILEPATH }}"
+          echo custom: >> "${{ steps.fp.outputs.FILEPATH }}"
+          echo '  Author: ${{ github.event.pull_request.user.login }}' >> "${{ steps.fp.outputs.FILEPATH }}"
+          echo '  Issue: "4904"' >> "${{ steps.fp.outputs.FILEPATH }}" # github.event.pull_request.issue for auto id?
+          echo '  PR: "${{ github.event.pull_request.number }}"' >> "${{ steps.fp.outputs.FILEPATH }}"
+
+      - name: Commit Changelog File
+        if: steps.changelog_check.outputs.exists == 'false'
+        uses: gr2m/create-or-update-pull-request-action@v1
+        env:
+          # When using the GITHUB_TOKEN, the resulting commit will not trigger another GitHub Actions
+          # Workflow run. This is due to limitations set by GitHub.
+          # See: https://docs.github.com/en/actions/security-guides/automatic-token-authentication#using-the-github_token-in-a-workflow
+          # When you use the repository's GITHUB_TOKEN to perform tasks on behalf of the GitHub Actions
+          # app, events triggered by the GITHUB_TOKEN will not create a new workflow run. This prevents
+          # you from accidentally creating recursive workflow runs. To get around this, use a Personal
+          # Access Token to commit changes.
+          GITHUB_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
+        with:
+          branch: ${{ github.event.pull_request.head.ref }}
+          # author expected in the format "Lorem J. Ipsum <lorem@example.com>"
+          author: "Github Build Bot <buildbot@fishtownanalytics.com>"
+          commit-message: "Add automated changelog yaml from template"
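For reference, an entry generated by the template step above would look much like the hand-written `Dependencies-*.yaml` files earlier in this diff. A sketch with hypothetical filename and PR details (the `Issue` value is pinned to "4904" by the workflow itself):

```
# .changes/unreleased/Dependencies-20220601-120000.yaml -- hypothetical filename and values
kind: Dependencies
body: "Bump some-dependency from 1.0.0 to 1.1.0"
time: 2022-06-01T12:00:00.000000-05:00
custom:
  Author: dependabot[bot]
  Issue: "4904"
  PR: "1234"
```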
.github/workflows/main.yml (37 lines changed)
@@ -48,13 +48,14 @@ jobs:
 
       - name: Install python dependencies
        run: |
-          pip install --user --upgrade pip
-          pip --version
-          pip install pre-commit
+          python -m pip install --user --upgrade pip
+          python -m pip --version
+          python -m pip install pre-commit
           pre-commit --version
-          pip install mypy==0.782
+          python -m pip install mypy==0.942
           mypy --version
-          pip install -r editable-requirements.txt
+          python -m pip install -r requirements.txt
+          python -m pip install -r dev-requirements.txt
           dbt --version
 
       - name: Run pre-commit hooks
@@ -68,7 +69,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ["3.7", "3.8", "3.9", "3.10"]
 
     env:
       TOXENV: "unit"
@@ -85,9 +86,9 @@ jobs:
 
       - name: Install python dependencies
        run: |
-          pip install --user --upgrade pip
-          pip --version
-          pip install tox
+          python -m pip install --user --upgrade pip
+          python -m pip --version
+          python -m pip install tox
           tox --version
 
       - name: Run tox
@@ -112,7 +113,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ["3.7", "3.8", "3.9", "3.10"]
         os: [ubuntu-latest]
         include:
          - python-version: 3.8
@@ -148,9 +149,9 @@ jobs:
 
       - name: Install python tools
        run: |
-          pip install --user --upgrade pip
-          pip --version
-          pip install tox
+          python -m pip install --user --upgrade pip
+          python -m pip --version
+          python -m pip install tox
           tox --version
 
       - name: Run tests
@@ -189,9 +190,9 @@ jobs:
 
       - name: Install python dependencies
        run: |
-          pip install --user --upgrade pip
-          pip install --upgrade setuptools wheel twine check-wheel-contents
-          pip --version
+          python -m pip install --user --upgrade pip
+          python -m pip install --upgrade setuptools wheel twine check-wheel-contents
+          python -m pip --version
 
       - name: Build distributions
        run: ./scripts/build-dist.sh
@@ -209,7 +210,7 @@ jobs:
 
       - name: Install wheel distributions
        run: |
-          find ./dist/*.whl -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
+          find ./dist/*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
 
       - name: Check wheel distributions
        run: |
@@ -218,7 +219,7 @@ jobs:
       - name: Install source distributions
        # ignore dbt-1.0.0, which intentionally raises an error when installed from source
        run: |
-          find ./dist/dbt-[a-z]*.gz -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
+          find ./dist/dbt-[a-z]*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
 
       - name: Check source distributions
        run: |
.github/workflows/performance.yml (deleted file, 176 lines)
@@ -1,176 +0,0 @@
-name: Performance Regression Tests
-# Schedule triggers
-on:
-  # runs twice a day at 10:05am and 10:05pm
-  schedule:
-    - cron: "5 10,22 * * *"
-  # Allows you to run this workflow manually from the Actions tab
-  workflow_dispatch:
-
-jobs:
-  # checks fmt of runner code
-  # purposefully not a dependency of any other job
-  # will block merging, but not prevent developing
-  fmt:
-    name: Cargo fmt
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-      - run: rustup component add rustfmt
-      - uses: actions-rs/cargo@v1
-        with:
-          command: fmt
-          args: --manifest-path performance/runner/Cargo.toml --all -- --check
-
-  # runs any tests associated with the runner
-  # these tests make sure the runner logic is correct
-  test-runner:
-    name: Test Runner
-    runs-on: ubuntu-latest
-    env:
-      # turns errors into warnings
-      RUSTFLAGS: "-D warnings"
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-      - uses: actions-rs/cargo@v1
-        with:
-          command: test
-          args: --manifest-path performance/runner/Cargo.toml
-
-  # build an optimized binary to be used as the runner in later steps
-  build-runner:
-    needs: [test-runner]
-    name: Build Runner
-    runs-on: ubuntu-latest
-    env:
-      RUSTFLAGS: "-D warnings"
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-      - uses: actions-rs/cargo@v1
-        with:
-          command: build
-          args: --release --manifest-path performance/runner/Cargo.toml
-      - uses: actions/upload-artifact@v2
-        with:
-          name: runner
-          path: performance/runner/target/release/runner
-
-  # run the performance measurements on the current or default branch
-  measure-dev:
-    needs: [build-runner]
-    name: Measure Dev Branch
-    runs-on: ubuntu-latest
-    steps:
-      - name: checkout dev
-        uses: actions/checkout@v2
-      - name: Setup Python
-        uses: actions/setup-python@v2.2.2
-        with:
-          python-version: "3.8"
-      - name: install dbt
-        run: pip install -r dev-requirements.txt -r editable-requirements.txt
-      - name: install hyperfine
-        run: wget https://github.com/sharkdp/hyperfine/releases/download/v1.11.0/hyperfine_1.11.0_amd64.deb && sudo dpkg -i hyperfine_1.11.0_amd64.deb
-      - uses: actions/download-artifact@v2
-        with:
-          name: runner
-      - name: change permissions
-        run: chmod +x ./runner
-      - name: run
-        run: ./runner measure -b dev -p ${{ github.workspace }}/performance/projects/
-      - uses: actions/upload-artifact@v2
-        with:
-          name: dev-results
-          path: performance/results/
-
-  # run the performance measurements on the release branch which we use
-  # as a performance baseline. This part takes by far the longest, so
-  # we do everything we can first so the job fails fast.
-  # -----
-  # we need to checkout dbt twice in this job: once for the baseline dbt
-  # version, and once to get the latest regression testing projects,
-  # metrics, and runner code from the develop or current branch so that
-  # the calculations match for both versions of dbt we are comparing.
-  measure-baseline:
-    needs: [build-runner]
-    name: Measure Baseline Branch
-    runs-on: ubuntu-latest
-    steps:
-      - name: checkout latest
-        uses: actions/checkout@v2
-        with:
-          ref: "0.20.latest"
-      - name: Setup Python
-        uses: actions/setup-python@v2.2.2
-        with:
-          python-version: "3.8"
-      - name: move repo up a level
-        run: mkdir ${{ github.workspace }}/../baseline/ && cp -r ${{ github.workspace }} ${{ github.workspace }}/../baseline
-      - name: "[debug] ls new dbt location"
-        run: ls ${{ github.workspace }}/../baseline/dbt/
-      # installation creates egg-links so we have to preserve source
-      - name: install dbt from new location
-        run: cd ${{ github.workspace }}/../baseline/dbt/ && pip install -r dev-requirements.txt -r editable-requirements.txt
-      # checkout the current branch to get all the target projects
-      # this deletes the old checked out code which is why we had to copy before
-      - name: checkout dev
-        uses: actions/checkout@v2
-      - name: install hyperfine
-        run: wget https://github.com/sharkdp/hyperfine/releases/download/v1.11.0/hyperfine_1.11.0_amd64.deb && sudo dpkg -i hyperfine_1.11.0_amd64.deb
-      - uses: actions/download-artifact@v2
-        with:
-          name: runner
-      - name: change permissions
-        run: chmod +x ./runner
-      - name: run runner
-        run: ./runner measure -b baseline -p ${{ github.workspace }}/performance/projects/
-      - uses: actions/upload-artifact@v2
-        with:
-          name: baseline-results
-          path: performance/results/
-
-  # detect regressions on the output generated from measuring
-  # the two branches. Exits with non-zero code if a regression is detected.
-  calculate-regressions:
-    needs: [measure-dev, measure-baseline]
-    name: Compare Results
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/download-artifact@v2
-        with:
-          name: dev-results
-      - uses: actions/download-artifact@v2
-        with:
-          name: baseline-results
-      - name: "[debug] ls result files"
-        run: ls
-      - uses: actions/download-artifact@v2
-        with:
-          name: runner
-      - name: change permissions
-        run: chmod +x ./runner
-      - name: make results directory
-        run: mkdir ./final-output/
-      - name: run calculation
-        run: ./runner calculate -r ./ -o ./final-output/
-      # always attempt to upload the results even if there were regressions found
-      - uses: actions/upload-artifact@v2
-        if: ${{ always() }}
-        with:
-          name: final-calculations
-          path: ./final-output/*
.github/workflows/release-docker.yml (3 lines changed)
@@ -12,6 +12,9 @@
 
 name: Docker release
 
+permissions:
+  packages: write
+
 on:
   workflow_dispatch:
     inputs:
.github/workflows/stale.yml (3 lines changed)
@@ -12,7 +12,6 @@ jobs:
 with:
   stale-issue-message: "This issue has been marked as Stale because it has been open for 180 days with no activity. If you would like the issue to remain open, please remove the stale label or comment on the issue, or it will be closed in 7 days."
   stale-pr-message: "This PR has been marked as Stale because it has been open for 180 days with no activity. If you would like the PR to remain open, please remove the stale label or comment on the PR, or it will be closed in 7 days."
   close-issue-message: "Although we are closing this issue as stale, it's not gone forever. Issues can be reopened if there is renewed community interest; add a comment to notify the maintainers."
   # mark issues/PRs stale when they haven't seen activity in 180 days
   days-before-stale: 180
   # ignore checking issues with the following labels
   exempt-issue-labels: "epic,discussion"
.github/workflows/triage-labels.yml (new file, 33 lines)
@@ -0,0 +1,33 @@
+# **what?**
+# When the core team triages, we sometimes need more information from the issue creator. In
+# those cases we remove the `triage` label and add the `awaiting_response` label. Once we
+# receive a response in the form of a comment, we want the `awaiting_response` label removed
+# in favor of the `triage` label so we are aware that the issue needs action.
+
+# **why?**
+# To help with our team triage issue tracking
+
+# **when?**
+# This will run when a comment is added to an issue and that issue has the `awaiting_response` label.
+
+name: Update Triage Label
+
+on: issue_comment
+
+defaults:
+  run:
+    shell: bash
+
+permissions:
+  issues: write
+
+jobs:
+  triage_label:
+    if: contains(github.event.issue.labels.*.name, 'awaiting_response')
+    runs-on: ubuntu-latest
+    steps:
+      - name: initial labeling
+        uses: andymckay/labeler@master
+        with:
+          add-labels: "triage"
+          remove-labels: "awaiting_response"
.github/workflows/version-bump.yml (2 lines changed)
@@ -107,3 +107,5 @@ jobs:
           base: ${{github.ref}}
           title: 'Bumping version to ${{steps.variables.outputs.VERSION_NUMBER}}'
           branch: 'bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_${{GITHUB.RUN_ID}}'
+          labels: |
+            Skip Changelog
.pre-commit-config.yaml
@@ -21,7 +21,7 @@ repos:
     - "markdown"
   - id: check-case-conflict
 - repo: https://github.com/psf/black
-  rev: 21.12b0
+  rev: 22.3.0
   hooks:
   - id: black
     args:
@@ -43,7 +43,7 @@ repos:
     alias: flake8-check
     stages: [manual]
 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v0.782
+  rev: v0.942
   hooks:
   - id: mypy
     # N.B.: Mypy is... a bit fragile.
303
CHANGELOG.md
303
CHANGELOG.md
@@ -3,312 +3,15 @@
|
||||
- This file provides a full account of all changes to `dbt-core` and `dbt-postgres`
|
||||
- Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases.
|
||||
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
|
||||
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](CONTRIBUTING.md)
|
||||
|
||||
|
||||
## dbt-core 1.1.0 (TBD)
|
||||
|
||||
### Features
|
||||
- Added Support for Semantic Versioning ([#4644](https://github.com/dbt-labs/dbt-core/pull/4644))
|
||||
- New Dockerfile to support specific db adapters and platforms. See docker/README.md for details ([#4495](https://github.com/dbt-labs/dbt-core/issues/4495), [#4487](https://github.com/dbt-labs/dbt-core/pull/4487))
|
||||
- Allow unique_key to take a list ([#2479](https://github.com/dbt-labs/dbt-core/issues/2479), [#4618](https://github.com/dbt-labs/dbt-core/pull/4618))
|
||||
- Add `--quiet` global flag and `print` Jinja function ([#3451](https://github.com/dbt-labs/dbt-core/issues/3451), [#4701](https://github.com/dbt-labs/dbt-core/pull/4701))
|
||||
|
||||
### Fixes
|
||||
- User wasn't asked for permission to overwite a profile entry when running init inside an existing project ([#4375](https://github.com/dbt-labs/dbt-core/issues/4375), [#4447](https://github.com/dbt-labs/dbt-core/pull/4447))
|
||||
- Add project name validation to `dbt init` ([#4490](https://github.com/dbt-labs/dbt-core/issues/4490),[#4536](https://github.com/dbt-labs/dbt-core/pull/4536))
|
||||
- Allow override of string and numeric types for adapters. ([#4603](https://github.com/dbt-labs/dbt-core/issues/4603))
|
||||
- A change in secret environment variables won't trigger a full reparse [#4650](https://github.com/dbt-labs/dbt-core/issues/4650) [4665](https://github.com/dbt-labs/dbt-core/pull/4665)
|
||||
- Fix misspellings and typos in docstrings ([#4545](https://github.com/dbt-labs/dbt-core/pull/4545))

### Under the hood

- Testing cleanup ([#4496](https://github.com/dbt-labs/dbt-core/pull/4496), [#4509](https://github.com/dbt-labs/dbt-core/pull/4509))
- Clean up test deprecation warnings ([#3988](https://github.com/dbt-labs/dbt-core/issues/3988), [#4556](https://github.com/dbt-labs/dbt-core/pull/4556))
- Use mashumaro for serialization in event logging ([#4504](https://github.com/dbt-labs/dbt-core/issues/4504), [#4505](https://github.com/dbt-labs/dbt-core/pull/4505))
- Drop support for Python 3.7.0 + 3.7.1 ([#4584](https://github.com/dbt-labs/dbt-core/issues/4584), [#4585](https://github.com/dbt-labs/dbt-core/pull/4585), [#4643](https://github.com/dbt-labs/dbt-core/pull/4643))
- Re-format codebase (except tests) using pre-commit hooks ([#3195](https://github.com/dbt-labs/dbt-core/issues/3195), [#4697](https://github.com/dbt-labs/dbt-core/pull/4697))
- Add deps module README ([#4686](https://github.com/dbt-labs/dbt-core/pull/4686))
- Initial conversion of tests to pytest ([#4690](https://github.com/dbt-labs/dbt-core/issues/4690), [#4691](https://github.com/dbt-labs/dbt-core/pull/4691))
- Fix errors in Windows for tests/functions ([#4781](https://github.com/dbt-labs/dbt-core/issues/4781), [#4767](https://github.com/dbt-labs/dbt-core/pull/4767))

Contributors:

- [@NiallRees](https://github.com/NiallRees) ([#4447](https://github.com/dbt-labs/dbt-core/pull/4447))
- [@alswang18](https://github.com/alswang18) ([#4644](https://github.com/dbt-labs/dbt-core/pull/4644))
- [@ehmartens](https://github.com/ehmartens) ([#4701](https://github.com/dbt-labs/dbt-core/pull/4701))
- [@mdesmet](https://github.com/mdesmet) ([#4604](https://github.com/dbt-labs/dbt-core/pull/4604))
- [@kazanzhy](https://github.com/kazanzhy) ([#4545](https://github.com/dbt-labs/dbt-core/pull/4545))

## dbt-core 1.0.4 (TBD)

### Fixes

- Fix bug causing empty node-level meta and snapshot config errors ([#4459](https://github.com/dbt-labs/dbt-core/issues/4459), [#4726](https://github.com/dbt-labs/dbt-core/pull/4726))
- Fix slow `dbt run` when using Postgres adapter, by deduplicating relations in `postgres_get_relations` ([#3058](https://github.com/dbt-labs/dbt-core/issues/3058), [#4521](https://github.com/dbt-labs/dbt-core/pull/4521))
- Fix partial parsing bug with multiple snapshot blocks ([#4771](https://github.com/dbt-labs/dbt-core/issues/4772), [#4773](https://github.com/dbt-labs/dbt-core/pull/4773))
- Fix lack of color output on Linux and macOS when piping the output into another process using the shell pipe (`|`) ([#4792](https://github.com/dbt-labs/dbt-core/pull/4792))
- Fixed a bug where nodes that depend on multiple macros couldn't be selected using `-s state:modified` ([#4678](https://github.com/dbt-labs/dbt-core/issues/4678))

Contributors:

- [@varun-dc](https://github.com/varun-dc) ([#4792](https://github.com/dbt-labs/dbt-core/pull/4792))

### Docs

- Resolve errors related to operations preventing the DAG from generating in the docs. Also patch a Spark issue to allow search to filter accurately past the missing columns. ([#4578](https://github.com/dbt-labs/dbt-core/issues/4578), [#4763](https://github.com/dbt-labs/dbt-core/pull/4763))

## dbt-core 1.0.3 (TBD)

### Fixes

- Fix bug accessing target fields in deps and clean commands ([#4752](https://github.com/dbt-labs/dbt-core/issues/4752), [#4758](https://github.com/dbt-labs/dbt-core/issues/4758))

## dbt-core 1.0.2 (TBD)

### Fixes

- Projects created using `dbt init` now have the correct `seeds` directory created (instead of `data`) ([#4588](https://github.com/dbt-labs/dbt-core/issues/4588), [#4599](https://github.com/dbt-labs/dbt-core/pull/4589))
- Don't require a profile for dbt deps and clean commands ([#4554](https://github.com/dbt-labs/dbt-core/issues/4554), [#4610](https://github.com/dbt-labs/dbt-core/pull/4610))
- The `modified.body` state selection method works correctly when a new model is added ([#4570](https://github.com/dbt-labs/dbt-core/issues/4570), [#4631](https://github.com/dbt-labs/dbt-core/pull/4631))
- Fix bug in retry logic for a bad response from the hub, and for a bad git tarball download ([#4577](https://github.com/dbt-labs/dbt-core/issues/4577), [#4579](https://github.com/dbt-labs/dbt-core/issues/4579), [#4609](https://github.com/dbt-labs/dbt-core/pull/4609))
- Restore previous log level (DEBUG) when a test depends on a disabled resource. Still WARN if the resource is missing ([#4594](https://github.com/dbt-labs/dbt-core/issues/4594), [#4647](https://github.com/dbt-labs/dbt-core/pull/4647))
- Add project name validation to `dbt init` ([#4490](https://github.com/dbt-labs/dbt-core/issues/4490), [#4536](https://github.com/dbt-labs/dbt-core/pull/4536))
- Support click versions in the v7.x series ([#4681](https://github.com/dbt-labs/dbt-core/pull/4681))
- Add adapter compatibility messaging ([#4438](https://github.com/dbt-labs/dbt-core/pull/4438), [#4565](https://github.com/dbt-labs/dbt-core/pull/4565))

Contributors:

* [@amirkdv](https://github.com/amirkdv) ([#4536](https://github.com/dbt-labs/dbt-core/pull/4536))
* [@twilly](https://github.com/twilly) ([#4681](https://github.com/dbt-labs/dbt-core/pull/4681))
* [@nkyuray](https://github.com/nkyuray) ([#4565](https://github.com/dbt-labs/dbt-core/pull/4565))
## dbt-core 1.0.1 (January 03, 2022)

## dbt-core 1.0.1rc1 (December 20, 2021)

### Fixes

- Fix wrong url in the dbt docs overview homepage ([#4442](https://github.com/dbt-labs/dbt-core/pull/4442))
- Fix redefined `status` param of `SQLQueryStatus` to typecheck the string, which passes on the `._message` value of `AdapterResponse` or the `str` value sent by an adapter plugin. ([#4463](https://github.com/dbt-labs/dbt-core/pull/4463#issuecomment-990174166))
- Fix `DepsStartPackageInstall` event to use package name instead of version number. ([#4482](https://github.com/dbt-labs/dbt-core/pull/4482))
- Reimplement log message to use adapter name instead of the object method. ([#4501](https://github.com/dbt-labs/dbt-core/pull/4501))
- Issue better error message for incompatible schemas ([#4470](https://github.com/dbt-labs/dbt-core/pull/4442), [#4497](https://github.com/dbt-labs/dbt-core/pull/4497))
- Remove secrets from error related to packages. ([#4507](https://github.com/dbt-labs/dbt-core/pull/4507))
- Prevent coercion of boolean values (`True`, `False`) to numeric values (`0`, `1`) in query results ([#4511](https://github.com/dbt-labs/dbt-core/issues/4511), [#4512](https://github.com/dbt-labs/dbt-core/pull/4512))
- Fix error with an env_var in a project hook ([#4523](https://github.com/dbt-labs/dbt-core/issues/4523), [#4524](https://github.com/dbt-labs/dbt-core/pull/4524))
- Add additional Windows compat logic for colored log output. ([#4443](https://github.com/dbt-labs/dbt-core/issues/4443))

### Docs

- Fix missing data on exposures in docs ([#4467](https://github.com/dbt-labs/dbt-core/issues/4467))

Contributors:

- [@remoyson](https://github.com/remoyson) ([#4442](https://github.com/dbt-labs/dbt-core/pull/4442))
## dbt-core 1.0.0 (December 3, 2021)

### Fixes

- Configure the CLI logger destination to use stdout instead of stderr ([#4368](https://github.com/dbt-labs/dbt-core/pull/4368))
- Make the size of `EVENT_HISTORY` configurable, via `EVENT_BUFFER_SIZE` global config ([#4411](https://github.com/dbt-labs/dbt-core/pull/4411), [#4416](https://github.com/dbt-labs/dbt-core/pull/4416))
- Change type of `log_format` in `profiles.yml` user config to be string, not boolean ([#4394](https://github.com/dbt-labs/dbt-core/pull/4394))

### Under the hood

- Only log cache events if `LOG_CACHE_EVENTS` is enabled, and disable by default. This restores previous behavior ([#4369](https://github.com/dbt-labs/dbt-core/pull/4369))
- Move event codes to be a top-level attribute of JSON-formatted logs, rather than nested in `data` ([#4381](https://github.com/dbt-labs/dbt-core/pull/4381))
- Fix failing integration test on Windows ([#4380](https://github.com/dbt-labs/dbt-core/pull/4380))
- Clean up warning messages for `clean` + `deps` ([#4366](https://github.com/dbt-labs/dbt-core/pull/4366))
- Use RFC3339 timestamps for log messages ([#4384](https://github.com/dbt-labs/dbt-core/pull/4384))
- Different text output for console (info) and file (debug) logs ([#4379](https://github.com/dbt-labs/dbt-core/pull/4379), [#4418](https://github.com/dbt-labs/dbt-core/pull/4418))
- Remove unused events. More structured `ConcurrencyLine`. Replace `\n` message starts/ends with `EmptyLine` events, and exclude `EmptyLine` from JSON-formatted output ([#4388](https://github.com/dbt-labs/dbt-core/pull/4388))
- Update `events` module README ([#4395](https://github.com/dbt-labs/dbt-core/pull/4395))
- Rework approach to JSON serialization for events with non-standard properties ([#4396](https://github.com/dbt-labs/dbt-core/pull/4396))
- Update legacy logger file name to `dbt.log.legacy` ([#4402](https://github.com/dbt-labs/dbt-core/pull/4402))
- Rollover `dbt.log` at 10 MB, and keep up to 5 backups, restoring previous behavior ([#4405](https://github.com/dbt-labs/dbt-core/pull/4405))
- Use reference keys instead of full relation objects in cache events ([#4410](https://github.com/dbt-labs/dbt-core/pull/4410))
- Add `node_type` contextual info to more events ([#4378](https://github.com/dbt-labs/dbt-core/pull/4378))
- Make `materialized` config optional in `node_type` ([#4417](https://github.com/dbt-labs/dbt-core/pull/4417))
- Stringify exception in `GenericExceptionOnRun` to support JSON serialization ([#4424](https://github.com/dbt-labs/dbt-core/pull/4424))
- Add "interop" tests for machine consumption of structured log output ([#4327](https://github.com/dbt-labs/dbt-core/pull/4327))
- Relax version specifier for `dbt-extractor` to `~=0.4.0`, to support compiled wheels for additional architectures when available ([#4427](https://github.com/dbt-labs/dbt-core/pull/4427))
## dbt-core 1.0.0rc3 (November 30, 2021)

### Fixes

- Support partial parsing of env_vars in metrics ([#4253](https://github.com/dbt-labs/dbt-core/issues/4293), [#4322](https://github.com/dbt-labs/dbt-core/pull/4322))
- Fix typo in `UnparsedSourceDefinition.__post_serialize__` ([#3545](https://github.com/dbt-labs/dbt-core/issues/3545), [#4349](https://github.com/dbt-labs/dbt-core/pull/4349))

### Under the hood

- Change some CompilationExceptions to ParsingExceptions ([#4254](https://github.com/dbt-labs/dbt-core/issues/4254), [#4328](https://github.com/dbt-labs/dbt-core/pull/4328))
- Reorder logic for static parser sampling to speed up model parsing ([#4332](https://github.com/dbt-labs/dbt-core/pull/4332))
- Use more augmented assignment statements ([#4315](https://github.com/dbt-labs/dbt-core/issues/4315), [#4331](https://github.com/dbt-labs/dbt-core/pull/4331))
- Adjust logic when finding approximate matches for models and tests ([#3835](https://github.com/dbt-labs/dbt-core/issues/3835), [#4076](https://github.com/dbt-labs/dbt-core/pull/4076))
- Restore small previous behaviors for logging: JSON formatting for first few events; `WARN`-level stdout for `list` task; include tracking events in `dbt.log` ([#4341](https://github.com/dbt-labs/dbt-core/pull/4341))

Contributors:

- [@sarah-weatherbee](https://github.com/sarah-weatherbee) ([#4331](https://github.com/dbt-labs/dbt-core/pull/4331))
- [@emilieschario](https://github.com/emilieschario) ([#4076](https://github.com/dbt-labs/dbt-core/pull/4076))
- [@sneznaj](https://github.com/sneznaj) ([#4349](https://github.com/dbt-labs/dbt-core/pull/4349))

## dbt-core 1.0.0rc2 (November 22, 2021)

### Breaking changes

- Restrict secret env vars (prefixed `DBT_ENV_SECRET_`) to `profiles.yml` + `packages.yml` _only_. Raise an exception if a secret env var is used elsewhere ([#4310](https://github.com/dbt-labs/dbt-core/issues/4310), [#4311](https://github.com/dbt-labs/dbt-core/pull/4311))
- Reorder arguments to `config.get()` so that `default` is second ([#4273](https://github.com/dbt-labs/dbt-core/issues/4273), [#4297](https://github.com/dbt-labs/dbt-core/pull/4297))

### Features

- Avoid error when missing column in YAML description ([#4151](https://github.com/dbt-labs/dbt-core/issues/4151), [#4285](https://github.com/dbt-labs/dbt-core/pull/4285))
- Allow `--defer` flag to `dbt snapshot` ([#4110](https://github.com/dbt-labs/dbt-core/issues/4110), [#4296](https://github.com/dbt-labs/dbt-core/pull/4296))
- Install prerelease packages when `version` explicitly references a prerelease version, regardless of `install-prerelease` status ([#4243](https://github.com/dbt-labs/dbt-core/issues/4243), [#4295](https://github.com/dbt-labs/dbt-core/pull/4295))
- Add data attributes to JSON log messages ([#4301](https://github.com/dbt-labs/dbt-core/pull/4301))
- Add event codes to all log events ([#4319](https://github.com/dbt-labs/dbt-core/pull/4319))

### Fixes

- Allow specifying default in Jinja `config.get` with the `default` keyword ([#4273](https://github.com/dbt-labs/dbt-core/issues/4273), [#4297](https://github.com/dbt-labs/dbt-core/pull/4297))
- Fix serialization error with missing quotes in metrics model ref ([#4252](https://github.com/dbt-labs/dbt-core/issues/4252), [#4287](https://github.com/dbt-labs/dbt-core/pull/4289))
- Correct definition of 'created_at' in ParsedMetric nodes ([#4298](https://github.com/dbt-labs/dbt-core/issues/4298), [#4299](https://github.com/dbt-labs/dbt-core/pull/4299))

### Under the hood

- Add --indirect-selection parameter to profiles.yml and builtin DBT_ env vars; stringified parameter to enable multi-modal use ([#3997](https://github.com/dbt-labs/dbt-core/issues/3997), [#4270](https://github.com/dbt-labs/dbt-core/pull/4270))
- Fix filesystem searcher test failure on Python 3.9 ([#3689](https://github.com/dbt-labs/dbt-core/issues/3689), [#4271](https://github.com/dbt-labs/dbt-core/pull/4271))
- Clean up deprecation warnings shown for `dbt_project.yml` config renames ([#4276](https://github.com/dbt-labs/dbt-core/issues/4276), [#4291](https://github.com/dbt-labs/dbt-core/pull/4291))
- Fix metrics count in compiled project stats ([#4290](https://github.com/dbt-labs/dbt-core/issues/4290), [#4292](https://github.com/dbt-labs/dbt-core/pull/4292))
- First pass at supporting more dbt tasks via python lib ([#4200](https://github.com/dbt-labs/dbt-core/pull/4200))

Contributors:

- [@kadero](https://github.com/kadero) ([#4285](https://github.com/dbt-labs/dbt-core/pull/4285), [#4296](https://github.com/dbt-labs/dbt-core/pull/4296))
- [@joellabes](https://github.com/joellabes) ([#4295](https://github.com/dbt-labs/dbt-core/pull/4295))

## dbt-core 1.0.0rc1 (November 10, 2021)

### Breaking changes

- Replace `greedy` flag/property for test selection with `indirect_selection: eager/cautious` flag/property. Set to `eager` by default. **Note:** This reverts test selection to its pre-v0.20 behavior by default. `dbt test -s my_model` _will_ select multi-parent tests, such as `relationships`, that depend on unselected resources. To achieve the behavior change in v0.20 + v0.21, set `--indirect-selection=cautious` on the CLI or `indirect_selection: cautious` in yaml selectors. ([#4082](https://github.com/dbt-labs/dbt-core/issues/4082), [#4104](https://github.com/dbt-labs/dbt-core/pull/4104))
- In v1.0.0, **`pip install dbt` will raise an explicit error.** Instead, please use `pip install dbt-<adapter>` (to use dbt with that database adapter), or `pip install dbt-core` (for core functionality). For parity with the previous behavior of `pip install dbt`, you can use: `pip install dbt-core dbt-postgres dbt-redshift dbt-snowflake dbt-bigquery` ([#4100](https://github.com/dbt-labs/dbt-core/issues/4100), [#4133](https://github.com/dbt-labs/dbt-core/pull/4133))
- Reorganize the `global_project` (macros) into smaller files with clearer names. Remove unused global macros: `column_list`, `column_list_for_create_table`, `incremental_upsert` ([#4154](https://github.com/dbt-labs/dbt-core/pull/4154))
- Introduce structured event interface, and begin conversion of all legacy logging ([#3359](https://github.com/dbt-labs/dbt-core/issues/3359), [#4055](https://github.com/dbt-labs/dbt-core/pull/4055))
  - **This is a breaking change for adapter plugins, requiring a very simple migration.** See the [`events` module README](core/dbt/events/README.md#adapter-maintainers) for details.
  - If you maintain another kind of dbt-core plugin that makes heavy use of legacy logging, and you need time to cut over to the new event interface, you can re-enable the legacy logger via an environment variable shim, `DBT_ENABLE_LEGACY_LOGGER=True`. Be advised that we will remove this capability in a future version of dbt-core.
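
A minimal sketch of the shim described above, using the environment variable named in this entry:

```sh
# opt back in to the legacy logger for a single invocation
DBT_ENABLE_LEGACY_LOGGER=True dbt run
```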

### Features

- Allow nullable `error_after` in source freshness ([#3874](https://github.com/dbt-labs/dbt-core/issues/3874), [#3955](https://github.com/dbt-labs/dbt-core/pull/3955))
- Add `metrics` nodes ([#4071](https://github.com/dbt-labs/dbt-core/issues/4071), [#4235](https://github.com/dbt-labs/dbt-core/pull/4235))
- Add support for `dbt init <project_name>`, and support for `skip_profile_setup` argument (`dbt init -s`) ([#4156](https://github.com/dbt-labs/dbt-core/issues/4156), [#4249](https://github.com/dbt-labs/dbt-core/pull/4249))
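
A short sketch of the new invocation (the project name `jaffle_shop` is illustrative only):

```sh
# scaffold a new project, skipping interactive profile setup
dbt init jaffle_shop -s
```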

### Fixes

- Changes unit tests using `assertRaisesRegexp` to `assertRaisesRegex` ([#4132](https://github.com/dbt-labs/dbt-core/issues/4132), [#4136](https://github.com/dbt-labs/dbt-core/pull/4136))
- Allow retries when the answer from a `dbt deps` is `None` ([#4178](https://github.com/dbt-labs/dbt-core/issues/4178), [#4225](https://github.com/dbt-labs/dbt-core/pull/4225))

### Docs

- Fix non-alphabetical sort of Source Tables in source overview page ([docs#81](https://github.com/dbt-labs/dbt-docs/issues/81), [docs#218](https://github.com/dbt-labs/dbt-docs/pull/218))
- Add title tag to node elements in tree ([docs#202](https://github.com/dbt-labs/dbt-docs/issues/202), [docs#203](https://github.com/dbt-labs/dbt-docs/pull/203))
- Account for test rename: `schema` → `generic`, `data` → `singular`. Use `test_metadata` instead of `schema`/`data` tags to differentiate ([docs#216](https://github.com/dbt-labs/dbt-docs/issues/216), [docs#222](https://github.com/dbt-labs/dbt-docs/pull/222))
- Add `metrics` ([core#4235](https://github.com/dbt-labs/dbt-core/issues/4235), [docs#223](https://github.com/dbt-labs/dbt-docs/pull/223))

### Under the hood

- Bump artifact schema versions for 1.0.0: manifest v4, run results v4, sources v3. Notable changes: added `metrics` nodes; schema test + data test nodes are renamed to generic test + singular test nodes; freshness threshold default values ([#4191](https://github.com/dbt-labs/dbt-core/pull/4191))
- Speed up node selection by skipping `incorporate_indirect_nodes` if not needed ([#4213](https://github.com/dbt-labs/dbt-core/issues/4213), [#4214](https://github.com/dbt-labs/dbt-core/issues/4214))
- When `on_schema_change` is set, pass common columns as `dest_columns` in incremental merge macros ([#4144](https://github.com/dbt-labs/dbt-core/issues/4144), [#4170](https://github.com/dbt-labs/dbt-core/pull/4170))
- Clear adapters before registering in `lib` module config generation ([#4218](https://github.com/dbt-labs/dbt-core/pull/4218))
- Remove official support for python 3.6, which is reaching end of life on December 23, 2021 ([#4134](https://github.com/dbt-labs/dbt-core/issues/4134), [#4223](https://github.com/dbt-labs/dbt-core/pull/4223))

Contributors:

- [@kadero](https://github.com/kadero) ([#3955](https://github.com/dbt-labs/dbt-core/pull/3955), [#4249](https://github.com/dbt-labs/dbt-core/pull/4249))
- [@frankcash](https://github.com/frankcash) ([#4136](https://github.com/dbt-labs/dbt-core/pull/4136))
- [@Kayrnt](https://github.com/Kayrnt) ([#4170](https://github.com/dbt-labs/dbt-core/pull/4170))
- [@VersusFacit](https://github.com/VersusFacit) ([#4104](https://github.com/dbt-labs/dbt-core/pull/4104))
- [@joellabes](https://github.com/joellabes) ([#4104](https://github.com/dbt-labs/dbt-core/pull/4104))
- [@b-per](https://github.com/b-per) ([#4225](https://github.com/dbt-labs/dbt-core/pull/4225))
- [@salmonsd](https://github.com/salmonsd) ([docs#218](https://github.com/dbt-labs/dbt-docs/pull/218))
- [@miike](https://github.com/miike) ([docs#203](https://github.com/dbt-labs/dbt-docs/pull/203))

## dbt-core 1.0.0b2 (October 25, 2021)

### Breaking changes

- Enable `on-run-start` and `on-run-end` hooks for `dbt test`. Add `flags.WHICH` to execution context, representing current task ([#3463](https://github.com/dbt-labs/dbt-core/issues/3463), [#4004](https://github.com/dbt-labs/dbt-core/pull/4004))

### Features

- Normalize global CLI arguments/flags ([#2990](https://github.com/dbt-labs/dbt/issues/2990), [#3839](https://github.com/dbt-labs/dbt/pull/3839))
- Turns on the static parser by default and adds the flag `--no-static-parser` to disable it. ([#3377](https://github.com/dbt-labs/dbt/issues/3377), [#3939](https://github.com/dbt-labs/dbt/pull/3939))
- Generic test FQNs have changed to include the relative path, resource, and column (if applicable) where they are defined. This makes it easier to configure them from the `tests` block in `dbt_project.yml` ([#3259](https://github.com/dbt-labs/dbt/pull/3259), [#3880](https://github.com/dbt-labs/dbt/pull/3880))
- Turn on partial parsing by default ([#3867](https://github.com/dbt-labs/dbt/issues/3867), [#3989](https://github.com/dbt-labs/dbt/issues/3989))
- Add `result:<status>` selectors to automatically rerun failed tests and erroneous models. This makes it easier to rerun failed dbt jobs with a simple selector flag instead of restarting from the beginning or manually running the dbt models in scope (see the sketch after this list). ([#3859](https://github.com/dbt-labs/dbt/issues/3891), [#4017](https://github.com/dbt-labs/dbt/pull/4017))
- `dbt init` is now interactive, generating profiles.yml when run inside an existing project ([#3625](https://github.com/dbt-labs/dbt/pull/3625))
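
A sketch of the rerun workflow these selectors enable (assuming run artifacts from the previous invocation were saved to `prev_artifacts/`, an illustrative path):

```sh
# rerun only the models that errored last time, comparing against saved artifacts
dbt run --select result:error --state prev_artifacts
```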

### Under the hood

- Fix intermittent errors in partial parsing tests ([#4060](https://github.com/dbt-labs/dbt-core/issues/4060), [#4068](https://github.com/dbt-labs/dbt-core/pull/4068))
- Make finding disabled nodes more consistent ([#4069](https://github.com/dbt-labs/dbt-core/issues/4069), [#4073](https://github.com/dbt-labs/dbt-core/pull/4073))
- Remove connection from `render_with_context` during parsing, thereby removing misleading log message ([#3137](https://github.com/dbt-labs/dbt-core/issues/3137), [#4062](https://github.com/dbt-labs/dbt-core/pull/4062))
- Wait for postgres docker container to be ready in `setup_db.sh`. ([#3876](https://github.com/dbt-labs/dbt-core/issues/3876), [#3908](https://github.com/dbt-labs/dbt-core/pull/3908))
- Prefer macros defined in the project over the ones in a package by default ([#4106](https://github.com/dbt-labs/dbt-core/issues/4106), [#4114](https://github.com/dbt-labs/dbt-core/pull/4114))
- Dependency updates ([#4079](https://github.com/dbt-labs/dbt-core/pull/4079), [#3532](https://github.com/dbt-labs/dbt-core/pull/3532))
- Schedule partial parsing for SQL files with env_var changes ([#3885](https://github.com/dbt-labs/dbt-core/issues/3885), [#4101](https://github.com/dbt-labs/dbt-core/pull/4101))
- Schedule partial parsing for schema files with env_var changes ([#3885](https://github.com/dbt-labs/dbt-core/issues/3885), [#4162](https://github.com/dbt-labs/dbt-core/pull/4162))
- Skip partial parsing when env_vars change in dbt_project or profile ([#3885](https://github.com/dbt-labs/dbt-core/issues/3885), [#4212](https://github.com/dbt-labs/dbt-core/pull/4212))

Contributors:

- [@sungchun12](https://github.com/sungchun12) ([#4017](https://github.com/dbt-labs/dbt/pull/4017))
- [@matt-winkler](https://github.com/matt-winkler) ([#4017](https://github.com/dbt-labs/dbt/pull/4017))
- [@NiallRees](https://github.com/NiallRees) ([#3625](https://github.com/dbt-labs/dbt/pull/3625))
- [@rvacaru](https://github.com/rvacaru) ([#3908](https://github.com/dbt-labs/dbt/pull/3908))
- [@JCZuurmond](https://github.com/jczuurmond) ([#4114](https://github.com/dbt-labs/dbt-core/pull/4114))
- [@ljhopkins2](https://github.com/ljhopkins2) ([#4079](https://github.com/dbt-labs/dbt-core/pull/4079))

## dbt-core 1.0.0b1 (October 11, 2021)

### Breaking changes

- The two types of test definitions are now "singular" and "generic" (instead of "data" and "schema", respectively). The `test_type:` selection method accepts `test_type:singular` and `test_type:generic`. (It will also accept `test_type:schema` and `test_type:data` for backwards compatibility) ([#3234](https://github.com/dbt-labs/dbt-core/issues/3234), [#3880](https://github.com/dbt-labs/dbt-core/pull/3880)). **Not backwards compatible:** The `--data` and `--schema` flags to `dbt test` are no longer supported, and tests no longer have the tags `'data'` and `'schema'` automatically applied.
- Deprecated the use of the `packages` arg of `adapter.dispatch` in favor of the `macro_namespace` arg. ([#3895](https://github.com/dbt-labs/dbt-core/issues/3895))
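
A brief illustration of the renamed selection method (a sketch; per the entry above, the legacy values still work for backwards compatibility):

```sh
# run only generic (formerly "schema") tests
dbt test --select test_type:generic
# run only singular (formerly "data") tests
dbt test --select test_type:singular
```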

### Features

- Normalize global CLI arguments/flags ([#2990](https://github.com/dbt-labs/dbt-core/issues/2990), [#3839](https://github.com/dbt-labs/dbt-core/pull/3839))
- Turns on the static parser by default and adds the flag `--no-static-parser` to disable it. ([#3377](https://github.com/dbt-labs/dbt-core/issues/3377), [#3939](https://github.com/dbt-labs/dbt-core/pull/3939))
- Generic test FQNs have changed to include the relative path, resource, and column (if applicable) where they are defined. This makes it easier to configure them from the `tests` block in `dbt_project.yml` ([#3259](https://github.com/dbt-labs/dbt-core/pull/3259), [#3880](https://github.com/dbt-labs/dbt-core/pull/3880))
- Turn on partial parsing by default ([#3867](https://github.com/dbt-labs/dbt-core/issues/3867), [#3989](https://github.com/dbt-labs/dbt-core/issues/3989))
- Generic tests can now be added under a `generic` subfolder in the `test-paths` directory. ([#4052](https://github.com/dbt-labs/dbt-core/pull/4052))

### Fixes

- Add generic tests defined on sources to the manifest once, not twice ([#3347](https://github.com/dbt-labs/dbt/issues/3347), [#3880](https://github.com/dbt-labs/dbt/pull/3880))
- Skip partial parsing if certain macros have changed ([#3810](https://github.com/dbt-labs/dbt/issues/3810), [#3982](https://github.com/dbt-labs/dbt/pull/3892))
- Enable cataloging of unlogged Postgres tables ([#3961](https://github.com/dbt-labs/dbt/issues/3961), [#3993](https://github.com/dbt-labs/dbt/pull/3993))
- Fix multiple disabled nodes ([#4013](https://github.com/dbt-labs/dbt/issues/4013), [#4018](https://github.com/dbt-labs/dbt/pull/4018))
- Fix multiple partial parsing errors ([#3996](https://github.com/dbt-labs/dbt/issues/3996), [#4020](https://github.com/dbt-labs/dbt/pull/4018))
- Return an error instead of a warning when running with `--warn-error` and no models are selected ([#4006](https://github.com/dbt-labs/dbt/issues/4006), [#4019](https://github.com/dbt-labs/dbt/pull/4019))
- Fixed bug with `error_if` test option ([#4070](https://github.com/dbt-labs/dbt-core/pull/4070))

### Under the hood

- Enact deprecation for `materialization-return` and replace deprecation warning with an exception. ([#3896](https://github.com/dbt-labs/dbt-core/issues/3896))
- Build catalog for only relational, non-ephemeral nodes in the graph ([#3920](https://github.com/dbt-labs/dbt-core/issues/3920))
- Enact deprecation to remove the `release` arg from the `execute_macro` method. ([#3900](https://github.com/dbt-labs/dbt-core/issues/3900))
- Enact deprecation for default quoting to be True. Override for the `dbt-snowflake` adapter so it stays `False`. ([#3898](https://github.com/dbt-labs/dbt-core/issues/3898))
- Enact deprecation for objects used as dictionaries when they should be dataclasses. Replace deprecation warning with an exception for the dunder methods `__iter__` and `__len__` for all superclasses of FakeAPIObject. ([#3897](https://github.com/dbt-labs/dbt-core/issues/3897))
- Enact deprecation for `adapter-macro` and replace deprecation warning with an exception. ([#3901](https://github.com/dbt-labs/dbt-core/issues/3901))
- Add warning when trying to put a node under the wrong key, i.e. a seed under models in a `schema.yml` file. ([#3899](https://github.com/dbt-labs/dbt-core/issues/3899))
- Plugins for `redshift`, `snowflake`, and `bigquery` have moved to separate repos: [`dbt-redshift`](https://github.com/dbt-labs/dbt-redshift), [`dbt-snowflake`](https://github.com/dbt-labs/dbt-snowflake), [`dbt-bigquery`](https://github.com/dbt-labs/dbt-bigquery)
- Change the default dbt packages installation directory to `dbt_packages` from `dbt_modules`. Also rename `module-path` to `packages-install-path` to allow default overrides of package install directory. Deprecation warning added for projects using the old `dbt_modules` name without specifying a `packages-install-path`. ([#3523](https://github.com/dbt-labs/dbt-core/issues/3523))
- Update the default project paths to be `analysis-paths = ['analyses']` and `test-paths = ['tests']`. Also have starter project set `analysis-paths: ['analyses']` from now on. ([#2659](https://github.com/dbt-labs/dbt-core/issues/2659))
- Define the data type of `sources` as an array of arrays of string in the manifest artifacts. ([#3966](https://github.com/dbt-labs/dbt-core/issues/3966), [#3967](https://github.com/dbt-labs/dbt-core/pull/3967))
- Marked `source-paths` and `data-paths` as deprecated keys in `dbt_project.yml` in favor of `model-paths` and `seed-paths` respectively. ([#1607](https://github.com/dbt-labs/dbt-core/issues/1607))
- Surface git errors to `stdout` when cloning dbt packages from GitHub. ([#3167](https://github.com/dbt-labs/dbt-core/issues/3167))

Contributors:

- [@dave-connors-3](https://github.com/dave-connors-3) ([#3920](https://github.com/dbt-labs/dbt-core/pull/3922))
- [@kadero](https://github.com/kadero) ([#3952](https://github.com/dbt-labs/dbt-core/pull/3953))
- [@samlader](https://github.com/samlader) ([#3993](https://github.com/dbt-labs/dbt-core/pull/3993))
- [@yu-iskw](https://github.com/yu-iskw) ([#3967](https://github.com/dbt-labs/dbt-core/pull/3967))
- [@laxjesse](https://github.com/laxjesse) ([#4019](https://github.com/dbt-labs/dbt-core/pull/4019))
- [@gitznik](https://github.com/Gitznik) ([#4124](https://github.com/dbt-labs/dbt-core/pull/4124))

- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)

## Previous Releases

For information on prior major and minor releases, see their changelogs:

* [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
* [1.0](https://github.com/dbt-labs/dbt-core/blob/1.0.latest/CHANGELOG.md)
* [0.21](https://github.com/dbt-labs/dbt-core/blob/0.21.latest/CHANGELOG.md)
* [0.20](https://github.com/dbt-labs/dbt-core/blob/0.20.latest/CHANGELOG.md)
* [0.19](https://github.com/dbt-labs/dbt-core/blob/0.19.latest/CHANGELOG.md)
CONTRIBUTING.md

@@ -1,79 +1,27 @@

# Contributing to `dbt`
# Contributing to `dbt-core`

`dbt-core` is open source software. It is what it is today because community members have opened issues, provided feedback, and [contributed to the knowledge loop](https://www.getdbt.com/dbt-labs/values/). Whether you are a seasoned open source contributor or a first-time committer, we welcome and encourage you to contribute code, documentation, ideas, or problem statements to this project.

1. [About this document](#about-this-document)
2. [Proposing a change](#proposing-a-change)
3. [Getting the code](#getting-the-code)
4. [Setting up an environment](#setting-up-an-environment)
5. [Running `dbt` in development](#running-dbt-in-development)
6. [Testing](#testing)
7. [Submitting a Pull Request](#submitting-a-pull-request)
2. [Getting the code](#getting-the-code)
3. [Setting up an environment](#setting-up-an-environment)
4. [Running `dbt` in development](#running-dbt-core-in-development)
5. [Testing dbt-core](#testing)
6. [Submitting a Pull Request](#submitting-a-pull-request)

## About this document

This document is a guide intended for folks interested in contributing to `dbt-core`. Below, we document the process by which members of the community should create issues and submit pull requests (PRs) in this repository. It is not intended as a guide for using `dbt-core`, and it assumes a certain level of familiarity with Python concepts such as virtualenvs, `pip`, python modules, filesystems, and so on. This guide assumes you are using macOS or Linux and are comfortable with the command line.
There are many ways to contribute to the ongoing development of `dbt-core`, such as by participating in discussions and issues. We encourage you to first read our higher-level document: ["Expectations for Open Source Contributors"](https://docs.getdbt.com/docs/contributing/oss-expectations).

If you're new to python development or contributing to open-source software, we encourage you to read this document from start to finish. If you get stuck, drop us a line in the `#dbt-core-development` channel on [slack](https://community.getdbt.com).
The rest of this document serves as a more granular guide for contributing code changes to `dbt-core` (this repository). It is not intended as a guide for using `dbt-core`, and some pieces assume a level of familiarity with Python development (virtualenvs, `pip`, etc). Specific code snippets in this guide assume you are using macOS or Linux and are comfortable with the command line.

#### Adapters
If you get stuck, we're happy to help! Drop us a line in the `#dbt-core-development` channel in the [dbt Community Slack](https://community.getdbt.com).

If you have an issue or code change suggestion related to a specific database [adapter](https://docs.getdbt.com/docs/available-adapters), please refer to that supported database's separate repo for those contributions.
### Notes

### Signing the CLA

Please note that all contributors to `dbt-core` must sign the [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements) to have their Pull Request merged into the `dbt-core` codebase. If you are unable to sign the CLA, then the `dbt-core` maintainers will unfortunately be unable to merge your Pull Request. You are, however, welcome to open issues and comment on existing ones.

## Proposing a change

`dbt-core` is Apache 2.0-licensed open source software. `dbt-core` is what it is today because community members like you have opened issues, provided feedback, and contributed to the knowledge loop for the entire community. Whether you are a seasoned open source contributor or a first-time committer, we welcome and encourage you to contribute code, documentation, ideas, or problem statements to this project.

### Defining the problem

If you have an idea for a new feature or if you've discovered a bug in `dbt-core`, the first step is to open an issue. Please check the list of [open issues](https://github.com/dbt-labs/dbt-core/issues) before creating a new one. If you find a relevant issue, please add a comment to the open issue instead of creating a new one. There are hundreds of open issues in this repository and it can be hard to know where to look for a relevant open issue. **The `dbt-core` maintainers are always happy to point contributors in the right direction**, so please err on the side of documenting your idea in a new issue if you are unsure where a problem statement belongs.

> **Note:** All community-contributed Pull Requests _must_ be associated with an open issue. If you submit a Pull Request that does not pertain to an open issue, you will be asked to create an issue describing the problem before the Pull Request can be reviewed.

### Discussing the idea

After you open an issue, a `dbt-core` maintainer will follow up by commenting on your issue (usually within 1-3 days) to explore your idea further and advise on how to implement the suggested changes. In many cases, community members will chime in with their own thoughts on the problem statement. If you as the issue creator are interested in submitting a Pull Request to address the issue, you should indicate this in the body of the issue. The `dbt-core` maintainers are _always_ happy to help contributors with the implementation of fixes and features, so please also indicate if there's anything you're unsure about or could use guidance around in the issue.

### Submitting a change

If an issue is appropriately well scoped and describes a beneficial change to the `dbt-core` codebase, then anyone may submit a Pull Request to implement the functionality described in the issue. See the sections below on how to do this.

The `dbt-core` maintainers will add a `good first issue` label if an issue is suitable for a first-time contributor. This label often means that the required code change is small, limited to one database adapter, or a net-new addition that does not impact existing functionality. You can see the list of currently open issues on the [Contribute](https://github.com/dbt-labs/dbt-core/contribute) page.

Here's a good workflow:
- Comment on the open issue, expressing your interest in contributing the required code change
- Outline your planned implementation. If you want help getting started, ask!
- Follow the steps outlined below to develop locally. Once you have opened a PR, one of the `dbt-core` maintainers will work with you to review your code.
- Add a test! Tests are crucial for both fixes and new features alike. We want to make sure that code works as intended, and that it avoids any bugs previously encountered. Currently, the best resource for understanding `dbt-core`'s [unit](test/unit) and [integration](test/integration) tests is the tests themselves. One of the maintainers can help by pointing out relevant examples.
- Check your formatting and linting with [Flake8](https://flake8.pycqa.org/en/latest/#), [Black](https://github.com/psf/black), and the rest of the hooks we have in our [pre-commit](https://pre-commit.com/) [config](https://github.com/dbt-labs/dbt-core/blob/75201be9db1cb2c6c01fa7e71a314f5e5beb060a/.pre-commit-config.yaml).

In some cases, the right resolution to an open issue might be tangential to the `dbt-core` codebase. The right path forward might be a documentation update or a change that can be made in user-space. In other cases, the issue might describe functionality that the `dbt-core` maintainers are unwilling or unable to incorporate into the `dbt-core` codebase. When it is determined that an open issue describes functionality that will not translate to a code change in the `dbt-core` repository, the issue will be tagged with the `wontfix` label (see below) and closed.

### Using issue labels

The `dbt-core` maintainers use labels to categorize open issues. Most labels describe the domain in the `dbt-core` codebase germane to the discussion.

| tag | description |
| --- | ----------- |
| [triage](https://github.com/dbt-labs/dbt-core/labels/triage) | This is a new issue which has not yet been reviewed by a `dbt-core` maintainer. This label is removed when a maintainer reviews and responds to the issue. |
| [bug](https://github.com/dbt-labs/dbt-core/labels/bug) | This issue represents a defect or regression in `dbt-core` |
| [enhancement](https://github.com/dbt-labs/dbt-core/labels/enhancement) | This issue represents net-new functionality in `dbt-core` |
| [good first issue](https://github.com/dbt-labs/dbt-core/labels/good%20first%20issue) | This issue does not require deep knowledge of the `dbt-core` codebase to implement. This issue is appropriate for a first-time contributor. |
| [help wanted](https://github.com/dbt-labs/dbt-core/labels/help%20wanted) / [discussion](https://github.com/dbt-labs/dbt-core/labels/discussion) | Conversation around this issue is ongoing, and there isn't yet a clear path forward. Input from community members is most welcome. |
| [duplicate](https://github.com/dbt-labs/dbt-core/labels/duplicate) | This issue is functionally identical to another open issue. The `dbt-core` maintainers will close this issue and encourage community members to focus conversation on the other one. |
| [snoozed](https://github.com/dbt-labs/dbt-core/labels/snoozed) | This issue describes a good idea, but one which will probably not be addressed in a six-month time horizon. The `dbt-core` maintainers will revisit these issues periodically and re-prioritize them accordingly. |
| [stale](https://github.com/dbt-labs/dbt-core/labels/stale) | This is an old issue which has not recently been updated. Stale issues will periodically be closed by `dbt-core` maintainers, but they can be re-opened if the discussion is restarted. |
| [wontfix](https://github.com/dbt-labs/dbt-core/labels/wontfix) | This issue does not require a code change in the `dbt-core` repository, or the maintainers are unwilling/unable to merge a Pull Request which implements the behavior described in the issue. |

#### Branching Strategy

`dbt-core` has three types of branches:

- **Trunks** are where active development of the next release takes place. There is one trunk, named `main`, at the time of writing; it is the default branch of the repository.
- **Release Branches** track a specific, not yet complete release of `dbt-core`. Each minor version release has a corresponding release branch. For example, the `0.11.x` series of releases has a branch called `0.11.latest`. This allows us to release new patch versions under `0.11` without necessarily needing to pull them into the latest version of `dbt-core`.
- **Feature Branches** track individual features and fixes. On completion they should be merged into the trunk branch or a specific release branch.
- **Adapters:** Is your issue or proposed code change related to a specific [database adapter](https://docs.getdbt.com/docs/available-adapters)? If so, please open issues, PRs, and discussions in that adapter's repository instead. The sole exception is Postgres; the `dbt-postgres` plugin lives in this repository (`dbt-core`).
- **CLA:** Please note that anyone contributing code to `dbt-core` must sign the [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements). If you are unable to sign the CLA, the `dbt-core` maintainers will unfortunately be unable to merge any of your Pull Requests. We welcome you to participate in discussions, open issues, and comment on existing ones.
- **Branches:** All pull requests from community contributors should target the `main` branch (default). If the change is needed as a patch for a minor version of dbt that has already been released (or is already a release candidate), a maintainer will backport the changes in your PR to the relevant "latest" release branch (`1.0.latest`, `1.1.latest`, ...)

## Getting the code

@@ -85,11 +33,11 @@ You will need `git` in order to download and modify the `dbt-core` source code.

If you are not a member of the `dbt-labs` GitHub organization, you can contribute to `dbt-core` by forking the `dbt-core` repository. For a detailed overview on forking, check out the [GitHub docs on forking](https://help.github.com/en/articles/fork-a-repo). In short, you will need to:

1. fork the `dbt-core` repository
2. clone your fork locally
3. check out a new branch for your proposed changes
4. push changes to your fork
5. open a pull request against `dbt-labs/dbt` from your forked repository
1. Fork the `dbt-core` repository
2. Clone your fork locally
3. Check out a new branch for your proposed changes
4. Push changes to your fork
5. Open a pull request against `dbt-labs/dbt-core` from your forked repository

### dbt Labs contributors

@@ -101,19 +49,21 @@ There are some tools that will be helpful to you in developing locally. While th

### Tools

A short list of tools used in `dbt-core` testing that will be helpful to your understanding:
These are the tools used in `dbt-core` development and testing:

- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.7, Python 3.8, and Python 3.9
- [`pytest`](https://docs.pytest.org/en/latest/) to discover/run tests
- [`make`](https://users.cs.duke.edu/~ola/courses/programming/Makefiles/Makefiles.html) - but don't worry too much, nobody _really_ understands how make works and our Makefile is super simple
- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.7, 3.8, 3.9, and 3.10
- [`pytest`](https://docs.pytest.org/en/latest/) to define, discover, and run tests
- [`flake8`](https://flake8.pycqa.org/en/latest/) for code linting
- [`black`](https://github.com/psf/black) for code formatting
- [`mypy`](https://mypy.readthedocs.io/en/stable/) for static type checking
- [Github Actions](https://github.com/features/actions)
- [`pre-commit`](https://pre-commit.com) to easily run those checks
- [`changie`](https://changie.dev/) to create changelog entries, without merge conflicts
- [`make`](https://users.cs.duke.edu/~ola/courses/programming/Makefiles/Makefiles.html) to run multiple setup or test steps in combination. Don't worry too much, nobody _really_ understands how `make` works, and our Makefile aims to be super simple.
- [GitHub Actions](https://github.com/features/actions) for automating tests and checks, once a PR is pushed to the `dbt-core` repository

A deep understanding of these tools is not required to effectively contribute to `dbt-core`, but we recommend checking out the attached documentation if you're interested in learning more about them.
A deep understanding of these tools is not required to effectively contribute to `dbt-core`, but we recommend checking out the attached documentation if you're interested in learning more about each one.

#### virtual environments
#### Virtual environments

We strongly recommend using virtual environments when developing code in `dbt-core`. We recommend creating this virtualenv in the root of the `dbt-core` repository. To create a new virtualenv, run:
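
The command block itself is elided in this diff view; a minimal sketch of its likely contents, inferred from the `source env/bin/activate` context line just below (assuming the virtualenv is named `env`):

```sh
python3 -m venv env
source env/bin/activate
```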

@@ -124,12 +74,12 @@ source env/bin/activate

This will create and activate a new Python virtual environment.

#### docker and docker-compose
#### Docker and `docker-compose`

Docker and docker-compose are both used in testing. Specific instructions for your OS can be found [here](https://docs.docker.com/get-docker/).
Docker and `docker-compose` are both used in testing. Specific instructions for your OS can be found [here](https://docs.docker.com/get-docker/).

#### postgres (optional)
#### Postgres (optional)

For testing, and later in the examples in this document, you may want to have `psql` available so you can poke around in the database and see what happened. We recommend that you use [homebrew](https://brew.sh/) for that on macOS, and your package manager on Linux. You can install any version of the postgres client that you'd like. On macOS, with homebrew setup, you can run:
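
The exact snippet is elided in this diff view; on macOS with homebrew, the install would look something like this (a sketch, not the verbatim command from the guide):

```sh
brew install postgresql
```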

@@ -149,24 +99,26 @@ make dev

```sh
pip install -r dev-requirements.txt -r editable-requirements.txt
```

When `dbt-core` is installed this way, any changes you make to the `dbt-core` source code will be reflected immediately in your next `dbt-core` run.
When installed in this way, any changes you make to your local copy of the source code will be reflected immediately in your next `dbt` run.

### Running `dbt-core`

With your virtualenv activated, the `dbt-core` script should point back to the source code you've cloned on your machine. You can verify this by running `which dbt`. This command should show you a path to an executable in your virtualenv.
With your virtualenv activated, the `dbt` script should point back to the source code you've cloned on your machine. You can verify this by running `which dbt`. This command should show you a path to an executable in your virtualenv.
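
For example (the exact path depends on where you cloned the repository):

```sh
which dbt
# => /path/to/dbt-core/env/bin/dbt
```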

Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as necessary to connect to your target databases. It may be a good idea to add a new profile pointing to a local postgres instance, or a specific test sandbox within your data warehouse if appropriate.
Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as necessary to connect to your target databases. It may be a good idea to add a new profile pointing to a local Postgres instance, or a specific test sandbox within your data warehouse if appropriate.

## Testing

Getting the `dbt-core` integration tests set up in your local environment will be very helpful as you start to make changes to your local version of `dbt-core`. The section that follows outlines some helpful tips for setting up the test environment.
Once you're able to manually test that your code change is working as expected, it's important to run existing automated tests, as well as adding some new ones. These tests will ensure that:
- Your code changes do not unexpectedly break other established functionality
- Your code changes can handle all known edge cases
- The functionality you're adding will _keep_ working in the future

Although `dbt-core` works with a number of different databases, you won't need to supply credentials for every one of these databases in your test environment. Instead you can test all dbt-core code changes with Python and Postgres.
Although `dbt-core` works with a number of different databases, you won't need to supply credentials for every one of these databases in your test environment. Instead, you can test most `dbt-core` code changes with Python and Postgres.

### Initial setup

We recommend starting with `dbt-core`'s Postgres tests. These tests cover most of the functionality in `dbt-core`, are the fastest to run, and are the easiest to set up. To run the Postgres integration tests, you'll have to do one extra step of setting up the test database:
Postgres offers the easiest way to test most `dbt-core` functionality today. They are the fastest to run, and the easiest to set up. To run the Postgres integration tests, you'll have to do one extra step of setting up the test database:

```sh
make setup-db
@@ -192,48 +144,50 @@ make test
# Runs postgres integration tests with py38 in "fail fast" mode.
make integration
```

> These make targets assume you have a local install of a recent version of [`tox`](https://tox.readthedocs.io/en/latest/) for unit/integration testing and pre-commit for code quality checks,
> These make targets assume you have a local installation of a recent version of [`tox`](https://tox.readthedocs.io/en/latest/) for unit/integration testing and pre-commit for code quality checks,
> unless you choose a Docker container to run tests. Run `make help` for more info.

Check out the other targets in the Makefile to see other commonly used test suites.

#### `pre-commit`
[`pre-commit`](https.pre-commit.com) takes care of running all code-checks for formatting and linting. Run `make dev` to install `pre-commit` in your local environment. Once this is done you can use any of the linter-based make targets as well as a git pre-commit hook that will ensure proper formatting and linting.
[`pre-commit`](https://pre-commit.com) takes care of running all code-checks for formatting and linting. Run `make dev` to install `pre-commit` in your local environment. Once this is done you can use any of the linter-based make targets as well as a git pre-commit hook that will ensure proper formatting and linting.
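
A typical invocation once installed (a sketch; `run --all-files` is standard `pre-commit` usage rather than a project-specific target):

```sh
# install the git hook scripts, then check every file in the repo
pre-commit install
pre-commit run --all-files
```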

#### `tox`

[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel; for example, you can run unit tests for Python 3.7, Python 3.8, and Python 3.9 checks in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py37`. The configuration for these tests is located in `tox.ini`.
[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel; for example, you can run unit tests for Python 3.7, Python 3.8, Python 3.9, and Python 3.10 checks in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py37`. The configuration for these tests is located in `tox.ini`.
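
For example, using the commands mentioned above:

```sh
# run unit tests for all configured Python versions in parallel
tox -p
# run unit tests for one specific Python version
tox -e py37
```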

#### `pytest`

Finally, you can also run a specific test or group of tests using [`pytest`](https://docs.pytest.org/en/latest/) directly. With a virtualenv active and dev dependencies installed you can do things like:

```sh
# run specific postgres integration tests
python -m pytest -m profile_postgres test/integration/001_simple_copy_test
# run all unit tests in a file
python -m pytest test/unit/test_graph.py
python3 -m pytest test/unit/test_graph.py
# run a specific unit test
python -m pytest test/unit/test_graph.py::GraphTest::test__dependency_list
python3 -m pytest test/unit/test_graph.py::GraphTest::test__dependency_list
# run specific Postgres integration tests (old way)
python3 -m pytest -m profile_postgres test/integration/074_postgres_unlogged_table_tests
# run specific Postgres integration tests (new way)
python3 -m pytest tests/functional/sources
```

> [Here](https://docs.pytest.org/en/reorganize-docs/new-docs/user/commandlineuseful.html) is a list of useful command-line options for `pytest` to use while developing.
> See [pytest usage docs](https://docs.pytest.org/en/6.2.x/usage.html) for an overview of useful command-line options.
## Adding CHANGELOG Entry

We use [changie](https://changie.dev) to generate `CHANGELOG` entries. Do not edit the `CHANGELOG.md` directly. Your modifications will be lost.
We use [changie](https://changie.dev) to generate `CHANGELOG` entries. **Note:** Do not edit the `CHANGELOG.md` directly. Your modifications will be lost.

Follow the steps to [install `changie`](https://changie.dev/guide/installation/) for your system.

Once changie is installed and your PR is created, simply run `changie new` and changie will walk you through the process of creating a changelog entry. Commit the file that's created and your changelog entry is complete!
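
For example (a sketch of the flow described above; `.changes/` is changie's default output directory):

```sh
changie new        # answer the interactive prompts
git add .changes/  # then commit the generated entry file
```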

You don't need to worry about which `dbt-core` version your change will go into. Just create the changelog entry with `changie`, and open your PR against the `main` branch. All merged changes will be included in the next minor version of `dbt-core`. The Core maintainers _may_ choose to "backport" specific changes in order to patch older minor versions. In that case, a maintainer will take care of that backport after merging your PR, before releasing the new version of `dbt-core`.
|
||||
|
||||

## Submitting a Pull Request

dbt Labs provides a CI environment to test changes to specific adapters, and periodic maintenance checks of `dbt-core` through GitHub Actions. For example, if you submit a pull request to the `dbt-redshift` repo, GitHub will trigger automated code checks and tests against Redshift.

A `dbt-core` maintainer will review your PR. They may suggest code revisions for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code.
- First-time contributors should note that code checks and unit tests require a maintainer to approve.

Automated tests run via GitHub Actions. If you're a first-time contributor, all tests (including code checks and unit tests) will require a maintainer to approve. Changes in the `dbt-core` repository trigger integration tests against Postgres. dbt Labs also provides CI environments in which to test changes to other adapters, triggered by PRs in those adapters' repositories, as well as periodic maintenance checks of each adapter in concert with the latest `dbt-core` code changes.

Once all tests are passing and your PR has been approved, a `dbt-core` maintainer will merge your changes into the active development branch. And that's it! Happy developing :tada:
@@ -3,7 +3,7 @@
# See `/docker` for a generic and production-ready docker file
##

FROM ubuntu:20.04
FROM ubuntu:22.04

ENV DEBIAN_FRONTEND noninteractive

@@ -46,6 +46,9 @@ RUN apt-get update \
    python3.9 \
    python3.9-dev \
    python3.9-venv \
    python3.10 \
    python3.10-dev \
    python3.10-venv \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

Makefile
@@ -9,8 +9,7 @@ endif
.PHONY: dev
dev: ## Installs dbt-* packages in develop mode along with development dependencies.
	@\
	pip install -r dev-requirements.txt -r editable-requirements.txt && \
	pre-commit install
	pip install -r dev-requirements.txt -r editable-requirements.txt

.PHONY: mypy
mypy: .env ## Runs mypy against staged changes for static type checking.
@@ -34,27 +33,27 @@ lint: .env ## Runs flake8 and mypy code checks against staged changes.
	$(DOCKER_CMD) pre-commit run mypy-check --hook-stage manual | grep -v "INFO"

.PHONY: unit
unit: .env ## Runs unit tests with py38.
unit: .env ## Runs unit tests with py
	@\
	$(DOCKER_CMD) tox -e py38
	$(DOCKER_CMD) tox -e py

.PHONY: test
test: .env ## Runs unit tests with py38 and code checks against staged changes.
test: .env ## Runs unit tests with py and code checks against staged changes.
	@\
	$(DOCKER_CMD) tox -e py38; \
	$(DOCKER_CMD) tox -e py; \
	$(DOCKER_CMD) pre-commit run black-check --hook-stage manual | grep -v "INFO"; \
	$(DOCKER_CMD) pre-commit run flake8-check --hook-stage manual | grep -v "INFO"; \
	$(DOCKER_CMD) pre-commit run mypy-check --hook-stage manual | grep -v "INFO"

.PHONY: integration
integration: .env ## Runs postgres integration tests with py38.
integration: .env ## Runs postgres integration tests with py-integration
	@\
	$(DOCKER_CMD) tox -e py38-integration -- -nauto
	$(DOCKER_CMD) tox -e py-integration -- -nauto

.PHONY: integration-fail-fast
integration-fail-fast: .env ## Runs postgres integration tests with py38 in "fail fast" mode.
integration-fail-fast: .env ## Runs postgres integration tests with py-integration in "fail fast" mode.
	@\
	$(DOCKER_CMD) tox -e py38-integration -- -x -nauto
	$(DOCKER_CMD) tox -e py-integration -- -x -nauto

.PHONY: setup-db
setup-db: ## Setup Postgres database with docker-compose for system testing.

@@ -9,7 +9,7 @@

**[dbt](https://www.getdbt.com/)** enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications.




## Understanding dbt

core/dbt/__init__.py (new file)
@@ -0,0 +1,7 @@
# N.B.
# This will add to the package’s __path__ all subdirectories of directories on sys.path named after the package which effectively combines both modules into a single namespace (dbt.adapters)
# The matching statement is in plugins/postgres/dbt/__init__.py

from pkgutil import extend_path

__path__ = extend_path(__path__, __name__)

core/dbt/adapters/__init__.py (new file)
@@ -0,0 +1,7 @@
# N.B.
# This will add to the package’s __path__ all subdirectories of directories on sys.path named after the package which effectively combines both modules into a single namespace (dbt.adapters)
# The matching statement is in plugins/postgres/dbt/adapters/__init__.py

from pkgutil import extend_path

__path__ = extend_path(__path__, __name__)
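
These two files use the classic `pkgutil` namespace-package pattern. As a rough, self-contained sketch of what `extend_path` buys you (the `demo_ns` package and temp layout below are invented for illustration, not part of dbt):

```python
# Two directories each contribute half of one logical package, just as
# dbt-core and the postgres plugin both contribute to dbt.adapters.
import os
import sys
import tempfile

INIT = "from pkgutil import extend_path\n__path__ = extend_path(__path__, __name__)\n"

root = tempfile.mkdtemp()
for part, module in [("core", "a"), ("plugin", "b")]:
    pkg_dir = os.path.join(root, part, "demo_ns")
    os.makedirs(pkg_dir)
    with open(os.path.join(pkg_dir, "__init__.py"), "w") as f:
        f.write(INIT)  # the same two lines as the dbt __init__.py files above
    with open(os.path.join(pkg_dir, module + ".py"), "w") as f:
        f.write("name = %r\n" % module)
    sys.path.insert(0, os.path.join(root, part))

# Both halves import as one package, even though they live in two directories:
from demo_ns import a, b
print(a.name, b.name)  # -> a b
```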
@@ -4,7 +4,7 @@ import os
# multiprocessing.RLock is a function returning this type
from multiprocessing.synchronize import RLock
from threading import get_ident
from typing import Dict, Tuple, Hashable, Optional, ContextManager, List, Union
from typing import Dict, Tuple, Hashable, Optional, ContextManager, List

import agate

@@ -281,15 +281,15 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
    @abc.abstractmethod
    def execute(
        self, sql: str, auto_begin: bool = False, fetch: bool = False
    ) -> Tuple[Union[str, AdapterResponse], agate.Table]:
    ) -> Tuple[AdapterResponse, agate.Table]:
        """Execute the given SQL.

        :param str sql: The sql to execute.
        :param bool auto_begin: If set, and dbt is not currently inside a
            transaction, automatically begin one.
        :param bool fetch: If set, fetch results.
        :return: A tuple of the status and the results (empty if fetch=False).
        :rtype: Tuple[Union[str, AdapterResponse], agate.Table]
        :return: A tuple of the query status and results (empty if fetch=False).
        :rtype: Tuple[AdapterResponse, agate.Table]
        """
        raise dbt.exceptions.NotImplementedException(
            "`execute` is not implemented for this adapter!"

@@ -130,9 +130,15 @@ class BaseAdapter(metaclass=AdapterMeta):
    methods are marked with a (passable) in their docstrings. Check docstrings
    for type information, etc.

    To implement a macro, implement "${adapter_type}__${macro_name}". in the
    To implement a macro, implement "${adapter_type}__${macro_name}" in the
    adapter's internal project.

    To invoke a method in an adapter macro, call it on the 'adapter' Jinja
    object using dot syntax.

    To invoke a method in model code, add the @available decorator atop a method
    declaration. Methods are invoked as macros.

    Methods:
        - exception_handler
        - date_function
@@ -221,7 +227,7 @@ class BaseAdapter(metaclass=AdapterMeta):
    @available.parse(lambda *a, **k: ("", empty_table()))
    def execute(
        self, sql: str, auto_begin: bool = False, fetch: bool = False
    ) -> Tuple[Union[str, AdapterResponse], agate.Table]:
    ) -> Tuple[AdapterResponse, agate.Table]:
        """Execute the given SQL. This is a thin wrapper around
        ConnectionManager.execute.

@@ -229,8 +235,8 @@ class BaseAdapter(metaclass=AdapterMeta):
        :param bool auto_begin: If set, and dbt is not currently inside a
            transaction, automatically begin one.
        :param bool fetch: If set, fetch results.
        :return: A tuple of the status and the results (empty if fetch=False).
        :rtype: Tuple[Union[str, AdapterResponse], agate.Table]
        :return: A tuple of the query status and results (empty if fetch=False).
        :rtype: Tuple[AdapterResponse, agate.Table]
        """
        return self.connections.execute(sql=sql, auto_begin=auto_begin, fetch=fetch)

@@ -270,12 +276,15 @@ class BaseAdapter(metaclass=AdapterMeta):
        """
        return self._macro_manifest_lazy

    def load_macro_manifest(self) -> MacroManifest:
    def load_macro_manifest(self, base_macros_only=False) -> MacroManifest:
        # base_macros_only is for the test framework
        if self._macro_manifest_lazy is None:
            # avoid a circular import
            from dbt.parser.manifest import ManifestLoader

            manifest = ManifestLoader.load_macros(self.config, self.connections.set_query_header)
            manifest = ManifestLoader.load_macros(
                self.config, self.connections.set_query_header, base_macros_only=base_macros_only
            )
            # TODO CT-211
            self._macro_manifest_lazy = manifest  # type: ignore[assignment]
            # TODO CT-211
@@ -337,11 +346,14 @@ class BaseAdapter(metaclass=AdapterMeta):
        # databases
        return info_schema_name_map

    def _relations_cache_for_schemas(self, manifest: Manifest) -> None:
    def _relations_cache_for_schemas(
        self, manifest: Manifest, cache_schemas: Set[BaseRelation] = None
    ) -> None:
        """Populate the relations cache for the given schemas. Returns an
        iterable of the schemas populated, as strings.
        """
        cache_schemas = self._get_cache_schemas(manifest)
        if not cache_schemas:
            cache_schemas = self._get_cache_schemas(manifest)
        with executor(self.config) as tpe:
            futures: List[Future[List[BaseRelation]]] = []
            for cache_schema in cache_schemas:
@@ -367,14 +379,16 @@ class BaseAdapter(metaclass=AdapterMeta):
                cache_update.add((relation.database, relation.schema))
        self.cache.update_schemas(cache_update)

    def set_relations_cache(self, manifest: Manifest, clear: bool = False) -> None:
    def set_relations_cache(
        self, manifest: Manifest, clear: bool = False, required_schemas: Set[BaseRelation] = None
    ) -> None:
        """Run a query that gets a populated cache of the relations in the
        database and set the cache on this adapter.
        """
        with self.cache.lock:
            if clear:
                self.cache.clear()
            self._relations_cache_for_schemas(manifest)
            self._relations_cache_for_schemas(manifest, required_schemas)

    @available
    def cache_added(self, relation: Optional[BaseRelation]) -> str:

@@ -140,8 +140,6 @@ class AdapterContainer:
            raise InternalException(f"No plugin found for {plugin_name}") from None
        plugins.append(plugin)
        seen.add(plugin_name)
        if plugin.dependencies is None:
            continue
        for dep in plugin.dependencies:
            if dep not in seen:
                plugin_names.append(dep)

@@ -7,7 +7,6 @@ from typing import (
    List,
    Generic,
    TypeVar,
    ClassVar,
    Tuple,
    Union,
    Dict,
@@ -88,10 +87,13 @@ class AdapterProtocol(  # type: ignore[misc]
        Compiler_T,
    ],
):
    AdapterSpecificConfigs: ClassVar[Type[AdapterConfig_T]]
    Column: ClassVar[Type[Column_T]]
    Relation: ClassVar[Type[Relation_T]]
    ConnectionManager: ClassVar[Type[ConnectionManager_T]]
    # N.B. Technically these are ClassVars, but mypy doesn't support putting type vars in a
    # ClassVar due to the restrictiveness of PEP-526
    # See: https://github.com/python/mypy/issues/5144
    AdapterSpecificConfigs: Type[AdapterConfig_T]
    Column: Type[Column_T]
    Relation: Type[Relation_T]
    ConnectionManager: Type[ConnectionManager_T]
    connections: ConnectionManager_T

    def __init__(self, config: AdapterRequiredConfig):
@@ -155,7 +157,7 @@ class AdapterProtocol(  # type: ignore[misc]

    def execute(
        self, sql: str, auto_begin: bool = False, fetch: bool = False
    ) -> Tuple[Union[str, AdapterResponse], agate.Table]:
    ) -> Tuple[AdapterResponse, agate.Table]:
        ...

    def get_compiler(self) -> Compiler_T:

@@ -1,7 +1,7 @@
# this module exists to resolve circular imports with the events module

from collections import namedtuple
from typing import Optional
from typing import Any, Optional


_ReferenceKey = namedtuple("_ReferenceKey", "database schema identifier")
@@ -14,7 +14,7 @@ def lowercase(value: Optional[str]) -> Optional[str]:
    return value.lower()


def _make_key(relation) -> _ReferenceKey:
def _make_key(relation: Any) -> _ReferenceKey:
    """Make _ReferenceKeys with lowercase values for the cache so we don't have
    to keep track of quoting
    """

@@ -1,6 +1,6 @@
import abc
import time
from typing import List, Optional, Tuple, Any, Iterable, Dict, Union
from typing import List, Optional, Tuple, Any, Iterable, Dict

import agate

@@ -78,7 +78,7 @@ class SQLConnectionManager(BaseConnectionManager):
        return connection, cursor

    @abc.abstractclassmethod
    def get_response(cls, cursor: Any) -> Union[AdapterResponse, str]:
    def get_response(cls, cursor: Any) -> AdapterResponse:
        """Get the status of the cursor."""
        raise dbt.exceptions.NotImplementedException(
            "`get_response` is not implemented for this adapter!"
@@ -117,7 +117,7 @@ class SQLConnectionManager(BaseConnectionManager):

    def execute(
        self, sql: str, auto_begin: bool = False, fetch: bool = False
    ) -> Tuple[Union[AdapterResponse, str], agate.Table]:
    ) -> Tuple[AdapterResponse, agate.Table]:
        sql = self._add_query_comment(sql)
        _, cursor = self.add_query(sql, auto_begin)
        response = self.get_response(cursor)

@@ -171,6 +171,7 @@ class SQLAdapter(BaseAdapter):
            "relation": relation,
        }
        self.execute_macro(DROP_SCHEMA_MACRO_NAME, kwargs=kwargs)
        self.commit_if_has_connection()
        # we can update the cache here
        self.cache.drop_schema(relation.database, relation.schema)

@@ -218,3 +219,25 @@ class SQLAdapter(BaseAdapter):
        kwargs = {"information_schema": information_schema, "schema": schema}
        results = self.execute_macro(CHECK_SCHEMA_EXISTS_MACRO_NAME, kwargs=kwargs)
        return results[0][0] > 0

    # This is for use in the test suite
    def run_sql_for_tests(self, sql, fetch, conn):
        cursor = conn.handle.cursor()
        try:
            cursor.execute(sql)
            if hasattr(conn.handle, "commit"):
                conn.handle.commit()
            if fetch == "one":
                return cursor.fetchone()
            elif fetch == "all":
                return cursor.fetchall()
            else:
                return
        except BaseException as e:
            if conn.handle and not getattr(conn.handle, "closed", True):
                conn.handle.rollback()
            print(sql)
            print(e)
            raise
        finally:
            conn.transaction_open = False

@@ -28,7 +28,7 @@ def _is_commit(revision: str) -> bool:


def _raise_git_cloning_error(repo, revision, error):
    stderr = error.stderr.decode("utf-8").strip()
    stderr = error.stderr.strip()
    if "usage: git" in stderr:
        stderr = stderr.split("\nusage: git")[0]
    if re.match("fatal: destination path '(.+)' already exists", stderr):
@@ -115,8 +115,8 @@ def checkout(cwd, repo, revision=None):
    try:
        return _checkout(cwd, repo, revision)
    except CommandResultError as exc:
        stderr = exc.stderr.decode("utf-8").strip()
        bad_package_spec(repo, revision, stderr)
        stderr = exc.stderr.strip()
        bad_package_spec(repo, revision, stderr)


def get_current_sha(cwd):
@@ -142,7 +142,7 @@ def clone_and_checkout(
            subdirectory=subdirectory,
        )
    except CommandResultError as exc:
        err = exc.stderr.decode("utf-8")
        err = exc.stderr
        exists = re.match("fatal: destination path '(.+)' already exists", err)
        if not exists:
            raise_git_cloning_problem(repo)

@@ -1,7 +1,17 @@
import functools
from typing import Any, Dict, List
import requests
from dbt.events.functions import fire_event
from dbt.events.types import RegistryProgressMakingGETRequest, RegistryProgressGETResponse
from dbt.events.types import (
    RegistryProgressMakingGETRequest,
    RegistryProgressGETResponse,
    RegistryIndexProgressMakingGETRequest,
    RegistryIndexProgressGETResponse,
    RegistryResponseUnexpectedType,
    RegistryResponseMissingTopKeys,
    RegistryResponseMissingNestedKeys,
    RegistryResponseExtraNestedKeys,
)
from dbt.utils import memoized, _connection_exception_retry as connection_exception_retry
from dbt import deprecations
import os
@@ -12,55 +22,86 @@ else:
DEFAULT_REGISTRY_BASE_URL = "https://hub.getdbt.com/"


def _get_url(url, registry_base_url=None):
def _get_url(name, registry_base_url=None):
    if registry_base_url is None:
        registry_base_url = DEFAULT_REGISTRY_BASE_URL
    url = "api/v1/{}.json".format(name)

    return "{}{}".format(registry_base_url, url)


def _get_with_retries(path, registry_base_url=None):
    get_fn = functools.partial(_get, path, registry_base_url)
def _get_with_retries(package_name, registry_base_url=None):
    get_fn = functools.partial(_get, package_name, registry_base_url)
    return connection_exception_retry(get_fn, 5)


def _get(path, registry_base_url=None):
    url = _get_url(path, registry_base_url)
def _get(package_name, registry_base_url=None):
    url = _get_url(package_name, registry_base_url)
    fire_event(RegistryProgressMakingGETRequest(url=url))
    # all exceptions from requests get caught in the retry logic so no need to wrap this here
    resp = requests.get(url, timeout=30)
    fire_event(RegistryProgressGETResponse(url=url, resp_code=resp.status_code))
    resp.raise_for_status()

    # It is unexpected for the content of the response to be None so if it is, raising this error
    # will cause this function to retry (if called within _get_with_retries) and hopefully get
    # a response. This seems to happen when there's an issue with the Hub.
    # The response should always be a dictionary. Anything else is unexpected, raise error.
    # Raising this error will cause this function to retry (if called within _get_with_retries)
    # and hopefully get a valid response. This seems to happen when there's an issue with the Hub.
    # Since we control what we expect the HUB to return, this is safe.
    # See https://github.com/dbt-labs/dbt-core/issues/4577
    if resp.json() is None:
        raise requests.exceptions.ContentDecodingError(
            "Request error: The response is None", response=resp
    # and https://github.com/dbt-labs/dbt-core/issues/4849
    response = resp.json()

    if not isinstance(response, dict):  # This will also catch Nonetype
        error_msg = (
            f"Request error: Expected a response type of <dict> but got {type(response)} instead"
        )
    return resp.json()
        fire_event(RegistryResponseUnexpectedType(response=response))
        raise requests.exceptions.ContentDecodingError(error_msg, response=resp)

    # check for expected top level keys
    expected_keys = {"name", "versions"}
    if not expected_keys.issubset(response):
        error_msg = (
            f"Request error: Expected the response to contain keys {expected_keys} "
            f"but is missing {expected_keys.difference(set(response))}"
        )
        fire_event(RegistryResponseMissingTopKeys(response=response))
        raise requests.exceptions.ContentDecodingError(error_msg, response=resp)

    # check for the keys we need nested under each version
    expected_version_keys = {"name", "packages", "downloads"}
    all_keys = set().union(*(response["versions"][d] for d in response["versions"]))
    if not expected_version_keys.issubset(all_keys):
        error_msg = (
            "Request error: Expected the response for the version to contain keys "
            f"{expected_version_keys} but is missing {expected_version_keys.difference(all_keys)}"
        )
        fire_event(RegistryResponseMissingNestedKeys(response=response))
        raise requests.exceptions.ContentDecodingError(error_msg, response=resp)

    # all version responses should contain identical keys.
    has_extra_keys = set().difference(*(response["versions"][d] for d in response["versions"]))
    if has_extra_keys:
        error_msg = (
            "Request error: Keys for all versions do not match. Found extra key(s) "
            f"of {has_extra_keys}."
        )
        fire_event(RegistryResponseExtraNestedKeys(response=response))
        raise requests.exceptions.ContentDecodingError(error_msg, response=resp)

    return response


def index(registry_base_url=None):
    return _get_with_retries("api/v1/index.json", registry_base_url)
_get_cached = memoized(_get_with_retries)


index_cached = memoized(index)


def packages(registry_base_url=None):
    return _get_with_retries("api/v1/packages.json", registry_base_url)


def package(name, registry_base_url=None):
    response = _get_with_retries("api/v1/{}.json".format(name), registry_base_url)


def package(package_name, registry_base_url=None) -> Dict[str, Any]:
    # returns a dictionary of metadata for all versions of a package
    response = _get_cached(package_name, registry_base_url)
    # Either redirectnamespace or redirectname in the JSON response indicate a redirect
    # redirectnamespace redirects based on package ownership
    # redirectname redirects based on package name
    # Both can be present at the same time, or neither. Fails gracefully to old name

    if ("redirectnamespace" in response) or ("redirectname" in response):

        if ("redirectnamespace" in response) and response["redirectnamespace"] is not None:
@@ -74,15 +115,49 @@ def package(name, registry_base_url=None):
            use_name = response["name"]

        new_nwo = use_namespace + "/" + use_name
        deprecations.warn("package-redirect", old_name=name, new_name=new_nwo)
        deprecations.warn("package-redirect", old_name=package_name, new_name=new_nwo)
    return response["versions"]


def package_version(package_name, version, registry_base_url=None) -> Dict[str, Any]:
    # returns the metadata of a specific version of a package
    response = package(package_name, registry_base_url)
    return response[version]


def get_available_versions(package_name) -> List["str"]:
    # returns a list of all available versions of a package
    response = package(package_name)
    return list(response)


def _get_index(registry_base_url=None):

    url = _get_url("index", registry_base_url)
    fire_event(RegistryIndexProgressMakingGETRequest(url=url))
    # all exceptions from requests get caught in the retry logic so no need to wrap this here
    resp = requests.get(url, timeout=30)
    fire_event(RegistryIndexProgressGETResponse(url=url, resp_code=resp.status_code))
    resp.raise_for_status()

    # The response should be a list. Anything else is unexpected, raise an error.
    # Raising this error will cause this function to retry and hopefully get a valid response.

    response = resp.json()

    if not isinstance(response, list):  # This will also catch Nonetype
        error_msg = (
            f"Request error: The response type of {type(response)} is not valid: {resp.text}"
        )
        raise requests.exceptions.ContentDecodingError(error_msg, response=resp)

    return response


def package_version(name, version, registry_base_url=None):
    return _get_with_retries("api/v1/{}/{}.json".format(name, version), registry_base_url)
def index(registry_base_url=None) -> List[str]:
    # this returns a list of all packages on the Hub
    get_index_fn = functools.partial(_get_index, registry_base_url)
    return connection_exception_retry(get_index_fn, 5)


def get_available_versions(name):
    response = package(name)
    return list(response["versions"])
index_cached = memoized(index)
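
Taken together, the checks added to `_get` amount to a lightweight schema for Hub responses. A hedged sketch of a response that would pass all three validations (the package name, versions, and download URLs below are invented):

```python
# Hypothetical example of a Hub response shape accepted by the new checks:
# a dict with top-level keys {"name", "versions"}, where every version entry
# carries identical keys including {"name", "packages", "downloads"}.
response = {
    "name": "example/some_package",
    "versions": {
        "0.1.0": {"name": "some_package", "packages": [], "downloads": {"tarball": "..."}},
        "0.2.0": {"name": "some_package", "packages": [], "downloads": {"tarball": "..."}},
    },
}

assert isinstance(response, dict)
assert {"name", "versions"}.issubset(response)
all_keys = set().union(*(response["versions"][v] for v in response["versions"]))
assert {"name", "packages", "downloads"}.issubset(all_keys)
```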
@@ -246,16 +246,17 @@ def _supports_long_paths() -> bool:
    # https://stackoverflow.com/a/35097999/11262881
    # I don't know exactly what he means, but I am inclined to believe him as
    # he's pretty active on Python windows bugs!
    try:
        dll = WinDLL("ntdll")
    except OSError:  # I don't think this happens? you need ntdll to run python
        return False
    # not all windows versions have it at all
    if not hasattr(dll, "RtlAreLongPathsEnabled"):
        return False
    # tell windows we want to get back a single unsigned byte (a bool).
    dll.RtlAreLongPathsEnabled.restype = c_bool
    return dll.RtlAreLongPathsEnabled()
    else:
        try:
            dll = WinDLL("ntdll")
        except OSError:  # I don't think this happens? you need ntdll to run python
            return False
        # not all windows versions have it at all
        if not hasattr(dll, "RtlAreLongPathsEnabled"):
            return False
        # tell windows we want to get back a single unsigned byte (a bool).
        dll.RtlAreLongPathsEnabled.restype = c_bool
        return dll.RtlAreLongPathsEnabled()


def convert_path(path: str) -> str:
@@ -443,7 +444,11 @@ def download_with_retries(
    connection_exception_retry(download_fn, 5)


def download(url: str, path: str, timeout: Optional[Union[float, tuple]] = None) -> None:
def download(
    url: str,
    path: str,
    timeout: Optional[Union[float, Tuple[float, float], Tuple[float, None]]] = None,
) -> None:
    path = convert_path(path)
    connection_timeout = timeout or float(os.getenv("DBT_HTTP_TIMEOUT", 10))
    response = requests.get(url, timeout=connection_timeout)
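
The widened `timeout` annotation mirrors how `requests` actually interprets the argument: a single float applies to both the connect and read phases, while a 2-tuple sets them separately (a `None` read timeout means wait indefinitely). A short sketch (the URL and values here are invented):

```python
import requests

# One float: 10s cap for connecting AND for each read between bytes.
requests.get("https://example.com", timeout=10.0)

# Tuple: 3.05s to establish the connection, 27s between bytes read.
requests.get("https://example.com", timeout=(3.05, 27.0))

# Tuple with None read timeout: bounded connect, unbounded read.
requests.get("https://example.com", timeout=(3.05, None))
```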

@@ -8,6 +8,7 @@ try:
except ImportError:
    from yaml import Loader, SafeLoader, Dumper  # type: ignore  # noqa: F401

from dbt.ui import warning_tag

YAML_ERROR_MESSAGE = """
Syntax error near line {line_number}
@@ -20,6 +21,27 @@ Raw Error:
""".strip()


class UniqueKeyLoader(SafeLoader):
    """A subclass that checks for unique yaml mapping nodes.

    This class extends `SafeLoader` from the `yaml` library to check for
    unique top level keys (mapping nodes). See issue (https://github.com/yaml/pyyaml/issues/165)
    and solution (https://gist.github.com/pypt/94d747fe5180851196eb?permalink_comment_id=4015118).
    """

    def construct_mapping(self, node, deep=False):
        mapping = set()
        self.flatten_mapping(node)  # This processes yaml anchors / merge keys (<<).
        for key_node, value_node in node.value:
            key = self.construct_object(key_node, deep=deep)
            if key in mapping:
                raise dbt.exceptions.DuplicateYamlKeyException(
                    f"Duplicate {key!r} key found in yaml file"
                )
            mapping.add(key)
        return super().construct_mapping(node, deep)


def line_no(i, line, width=3):
    line_number = str(i).ljust(width)
    return "{}| {}".format(line_number, line)
@@ -47,13 +69,16 @@ def contextualized_yaml_error(raw_contents, error):
    )


def safe_load(contents) -> Optional[Dict[str, Any]]:
    return yaml.load(contents, Loader=SafeLoader)
def safe_load(contents, unique=False) -> Optional[Dict[str, Any]]:
    if unique:
        return yaml.load(contents, Loader=UniqueKeyLoader)
    else:
        return yaml.load(contents, Loader=SafeLoader)


def load_yaml_text(contents):
def load_yaml_text(contents, path=None):
    try:
        return safe_load(contents)
        return safe_load(contents, unique=True)
    except (yaml.scanner.ScannerError, yaml.YAMLError) as e:
        if hasattr(e, "problem_mark"):
            error = contextualized_yaml_error(contents, e)
@@ -61,3 +86,9 @@ def load_yaml_text(contents):
        error = str(e)

        raise dbt.exceptions.ValidationException(error)
    except dbt.exceptions.DuplicateYamlKeyException as e:
        # TODO: We may want to raise an exception instead of a warning in the future.
        if path:
            e.msg = f"{e} {path.searched_path}/{path.relative_path}."
        dbt.exceptions.warn_or_raise(e, log_fmt=warning_tag("{}"))
        return safe_load(contents)

@@ -132,7 +132,11 @@ def _all_source_paths(
    analysis_paths: List[str],
    macro_paths: List[str],
) -> List[str]:
    return list(chain(model_paths, seed_paths, snapshot_paths, analysis_paths, macro_paths))
    # We need to turn a list of lists into just a list, then convert to a set to
    # get only unique elements, then back to a list
    return list(
        set(list(chain(model_paths, seed_paths, snapshot_paths, analysis_paths, macro_paths)))
    )


T = TypeVar("T")

@@ -1,12 +1,15 @@
from typing import Dict, Any, Tuple, Optional, Union, Callable
import re
import os

from dbt.clients.jinja import get_rendered, catch_jinja
from dbt.context.target import TargetContext
from dbt.context.secret import SecretContext
from dbt.context.secret import SecretContext, SECRET_PLACEHOLDER
from dbt.context.base import BaseContext
from dbt.contracts.connection import HasCredentials
from dbt.exceptions import DbtProjectError, CompilationException, RecursionException
from dbt.utils import deep_map_render
from dbt.logger import SECRET_ENV_PREFIX


Keypath = Tuple[Union[str, int], ...]
@@ -114,11 +117,9 @@ class DbtProjectYamlRenderer(BaseRenderer):
    def name(self):
        "Project config"

    # Uses SecretRenderer
    def get_package_renderer(self) -> BaseRenderer:
        return PackageRenderer(self.context)

    def get_selector_renderer(self) -> BaseRenderer:
        return SelectorRenderer(self.context)
        return PackageRenderer(self.ctx_obj.cli_vars)

    def render_project(
        self,
@@ -136,8 +137,7 @@ class DbtProjectYamlRenderer(BaseRenderer):
        return package_renderer.render_data(packages)

    def render_selectors(self, selectors: Dict[str, Any]):
        selector_renderer = self.get_selector_renderer()
        return selector_renderer.render_data(selectors)
        return self.render_data(selectors)

    def render_entry(self, value: Any, keypath: Keypath) -> Any:
        result = super().render_entry(value, keypath)
@@ -165,18 +165,10 @@ class DbtProjectYamlRenderer(BaseRenderer):
        return True


class SelectorRenderer(BaseRenderer):
    @property
    def name(self):
        return "Selector config"


class SecretRenderer(BaseRenderer):
    def __init__(self, cli_vars: Optional[Dict[str, Any]] = None) -> None:
    def __init__(self, cli_vars: Dict[str, Any] = {}) -> None:
        # Generate contexts here because we want to save the context
        # object in order to retrieve the env_vars.
        if cli_vars is None:
            cli_vars = {}
        self.ctx_obj = SecretContext(cli_vars)
        context = self.ctx_obj.to_dict()
        super().__init__(context)
@@ -185,6 +177,23 @@ class SecretRenderer(BaseRenderer):
    def name(self):
        return "Secret"

    def render_value(self, value: Any, keypath: Optional[Keypath] = None) -> Any:
        rendered = super().render_value(value, keypath)
        if SECRET_ENV_PREFIX in str(rendered):
            search_group = f"({SECRET_ENV_PREFIX}(.*))"
            pattern = SECRET_PLACEHOLDER.format(search_group).replace("$", r"\$")
            m = re.search(
                pattern,
                rendered,
            )
            if m:
                found = m.group(1)
                value = os.environ[found]
                replace_this = SECRET_PLACEHOLDER.format(found)
                return rendered.replace(replace_this, value)
            else:
                return rendered
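
To make the placeholder round-trip concrete: during rendering, a secret env var is first substituted with a `SECRET_PLACEHOLDER`-wrapped marker, and `render_value` later swaps the marker for the real value from the environment. A rough standalone re-creation of that second step (the marker format is copied from `core/dbt/context/secret.py`; the env var name and value are invented):

```python
import os
import re

SECRET_ENV_PREFIX = "DBT_ENV_SECRET_"
SECRET_PLACEHOLDER = "$$$DBT_SECRET_START$$${}$$$DBT_SECRET_END$$$"

os.environ["DBT_ENV_SECRET_GIT_TOKEN"] = "hunter2"  # invented for the demo

rendered = "https://user:" + SECRET_PLACEHOLDER.format("DBT_ENV_SECRET_GIT_TOKEN") + "@github.com"

# Same search/replace dance as SecretRenderer.render_value above:
search_group = f"({SECRET_ENV_PREFIX}(.*))"
pattern = SECRET_PLACEHOLDER.format(search_group).replace("$", r"\$")
m = re.search(pattern, rendered)
if m:
    found = m.group(1)
    rendered = rendered.replace(SECRET_PLACEHOLDER.format(found), os.environ[found])

print(rendered)  # -> https://user:hunter2@github.com
```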


class ProfileRenderer(SecretRenderer):
    @property

@@ -1,7 +1,7 @@
import itertools
import os
from copy import deepcopy
from dataclasses import dataclass
from dataclasses import dataclass, field
from pathlib import Path
from typing import Dict, Any, Optional, Mapping, Iterator, Iterable, Tuple, List, MutableSet, Type

@@ -312,22 +312,26 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):

        warn_or_error(msg, log_fmt=warning_tag("{}"))

    def load_dependencies(self) -> Mapping[str, "RuntimeConfig"]:
    def load_dependencies(self, base_only=False) -> Mapping[str, "RuntimeConfig"]:
        if self.dependencies is None:
            all_projects = {self.project_name: self}
            internal_packages = get_include_paths(self.credentials.type)
            # raise exception if fewer installed packages than in packages.yml
            count_packages_specified = len(self.packages.packages)  # type: ignore
            count_packages_installed = len(tuple(self._get_project_directories()))
            if count_packages_specified > count_packages_installed:
                raise_compiler_error(
                    f"dbt found {count_packages_specified} package(s) "
                    f"specified in packages.yml, but only "
                    f"{count_packages_installed} package(s) installed "
                    f'in {self.packages_install_path}. Run "dbt deps" to '
                    f"install package dependencies."
                )
            project_paths = itertools.chain(internal_packages, self._get_project_directories())
            if base_only:
                # Test setup -- we want to load macros without dependencies
                project_paths = itertools.chain(internal_packages)
            else:
                # raise exception if fewer installed packages than in packages.yml
                count_packages_specified = len(self.packages.packages)  # type: ignore
                count_packages_installed = len(tuple(self._get_project_directories()))
                if count_packages_specified > count_packages_installed:
                    raise_compiler_error(
                        f"dbt found {count_packages_specified} package(s) "
                        f"specified in packages.yml, but only "
                        f"{count_packages_installed} package(s) installed "
                        f'in {self.packages_install_path}. Run "dbt deps" to '
                        f"install package dependencies."
                    )
                project_paths = itertools.chain(internal_packages, self._get_project_directories())
            for project_name, project in self.load_projects(project_paths):
                if project_name in all_projects:
                    raise_compiler_error(
@@ -413,6 +417,9 @@ class UnsetProfileConfig(RuntimeConfig):
    missing, any access to profile members results in an exception.
    """

    profile_name: str = field(repr=False)
    target_name: str = field(repr=False)

    def __post_init__(self):
        # instead of futzing with InitVar overrides or rewriting __init__, just
        # `del` the attrs we don't want users touching.
@@ -433,6 +440,56 @@ class UnsetProfileConfig(RuntimeConfig):
        # re-override the poisoned profile behavior
        return DictDefaultEmptyStr({})

    def to_project_config(self, with_packages=False):
        """Return a dict representation of the config that could be written to
        disk with `yaml.safe_dump` to get this configuration.

        Overrides dbt.config.Project.to_project_config to omit undefined profile
        attributes.

        :param with_packages bool: If True, include the serialized packages
            file in the root.
        :returns dict: The serialized profile.
        """
        result = deepcopy(
            {
                "name": self.project_name,
                "version": self.version,
                "project-root": self.project_root,
                "profile": "",
                "model-paths": self.model_paths,
                "macro-paths": self.macro_paths,
                "seed-paths": self.seed_paths,
                "test-paths": self.test_paths,
                "analysis-paths": self.analysis_paths,
                "docs-paths": self.docs_paths,
                "asset-paths": self.asset_paths,
                "target-path": self.target_path,
                "snapshot-paths": self.snapshot_paths,
                "clean-targets": self.clean_targets,
                "log-path": self.log_path,
                "quoting": self.quoting,
                "models": self.models,
                "on-run-start": self.on_run_start,
                "on-run-end": self.on_run_end,
                "dispatch": self.dispatch,
                "seeds": self.seeds,
                "snapshots": self.snapshots,
                "sources": self.sources,
                "tests": self.tests,
                "vars": self.vars.to_dict(),
                "require-dbt-version": [v.to_version_string() for v in self.dbt_version],
                "config-version": self.config_version,
            }
        )
        if self.query_comment:
            result["query-comment"] = self.query_comment.to_dict(omit_none=True)

        if with_packages:
            result.update(self.packages.to_dict(omit_none=True))

        return result

    @classmethod
    def from_parts(
        cls,

@@ -1,9 +1,10 @@
from pathlib import Path
from copy import deepcopy
from typing import Dict, Any, Union
from dbt.clients.yaml_helper import yaml, Loader, Dumper, load_yaml_text  # noqa: F401
from dbt.dataclass_schema import ValidationError

from .renderer import SelectorRenderer
from .renderer import BaseRenderer

from dbt.clients.system import (
    load_file_contents,
@@ -57,7 +58,7 @@ class SelectorConfig(Dict[str, Dict[str, Union[SelectionSpec, bool]]]):
    def render_from_dict(
        cls,
        data: Dict[str, Any],
        renderer: SelectorRenderer,
        renderer: BaseRenderer,
    ) -> "SelectorConfig":
        try:
            rendered = renderer.render_data(data)
@@ -72,7 +73,7 @@ class SelectorConfig(Dict[str, Dict[str, Union[SelectionSpec, bool]]]):
    def from_path(
        cls,
        path: Path,
        renderer: SelectorRenderer,
        renderer: BaseRenderer,
    ) -> "SelectorConfig":
        try:
            data = load_yaml_text(load_file_contents(str(path)))
@@ -140,28 +141,33 @@ def validate_selector_default(selector_file: SelectorFile) -> None:
# good to combine the two flows into one at some point.
class SelectorDict:
    @classmethod
    def parse_dict_definition(cls, definition):
    def parse_dict_definition(cls, definition, selector_dict={}):
        key = list(definition)[0]
        value = definition[key]
        if isinstance(value, list):
            new_values = []
            for sel_def in value:
                new_value = cls.parse_from_definition(sel_def)
                new_value = cls.parse_from_definition(sel_def, selector_dict=selector_dict)
                new_values.append(new_value)
            value = new_values
        if key == "exclude":
            definition = {key: value}
        elif len(definition) == 1:
            definition = {"method": key, "value": value}
        elif key == "method" and value == "selector":
            sel_def = definition.get("value")
            if sel_def not in selector_dict:
                raise DbtSelectorsError(f"Existing selector definition for {sel_def} not found.")
            return selector_dict[definition["value"]]["definition"]
        return definition

    @classmethod
    def parse_a_definition(cls, def_type, definition):
    def parse_a_definition(cls, def_type, definition, selector_dict={}):
        # this definition must be a list
        new_dict = {def_type: []}
        for sel_def in definition[def_type]:
            if isinstance(sel_def, dict):
                sel_def = cls.parse_from_definition(sel_def)
                sel_def = cls.parse_from_definition(sel_def, selector_dict=selector_dict)
                new_dict[def_type].append(sel_def)
            elif isinstance(sel_def, str):
                sel_def = SelectionCriteria.dict_from_single_spec(sel_def)
@@ -171,15 +177,17 @@ class SelectorDict:
        return new_dict

    @classmethod
    def parse_from_definition(cls, definition):
    def parse_from_definition(cls, definition, selector_dict={}):
        if isinstance(definition, str):
            definition = SelectionCriteria.dict_from_single_spec(definition)
        elif "union" in definition:
            definition = cls.parse_a_definition("union", definition)
            definition = cls.parse_a_definition("union", definition, selector_dict=selector_dict)
        elif "intersection" in definition:
            definition = cls.parse_a_definition("intersection", definition)
            definition = cls.parse_a_definition(
                "intersection", definition, selector_dict=selector_dict
            )
        elif isinstance(definition, dict):
            definition = cls.parse_dict_definition(definition)
            definition = cls.parse_dict_definition(definition, selector_dict=selector_dict)
        return definition

    # This is the normal entrypoint of this code. Give it the
@@ -190,6 +198,8 @@ class SelectorDict:
        for selector in selectors:
            sel_name = selector["name"]
            selector_dict[sel_name] = selector
            definition = cls.parse_from_definition(selector["definition"])
            definition = cls.parse_from_definition(
                selector["definition"], selector_dict=deepcopy(selector_dict)
            )
            selector_dict[sel_name]["definition"] = definition
        return selector_dict
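
The `selector_dict` threading above is what lets one named selector reference another via `method: selector`. A rough standalone sketch of just that lookup branch (simplified, with invented selector names, not dbt's actual code path):

```python
# Simplified model of the `method: selector` resolution added above.
already_parsed = {
    "nightly": {"name": "nightly", "definition": {"method": "tag", "value": "nightly"}},
}

def resolve(definition, selector_dict):
    # {"method": "selector", "value": "nightly"} -> the referenced definition
    if definition.get("method") == "selector":
        name = definition.get("value")
        if name not in selector_dict:
            raise KeyError(f"Existing selector definition for {name} not found.")
        return selector_dict[name]["definition"]
    return definition

print(resolve({"method": "selector", "value": "nightly"}, already_parsed))
# -> {'method': 'tag', 'value': 'nightly'}
```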

@@ -1,9 +1,15 @@
from typing import Dict, Any
from argparse import Namespace
from typing import Any, Dict, Optional, Union
from xmlrpc.client import Boolean
from dbt.contracts.project import UserConfig

import dbt.flags as flags
from dbt.clients import yaml_helper
from dbt.config import Profile, Project, read_user_config
from dbt.config.renderer import DbtProjectYamlRenderer, ProfileRenderer
from dbt.events.functions import fire_event
from dbt.exceptions import raise_compiler_error, ValidationException
from dbt.events.types import InvalidVarsYAML
from dbt.exceptions import ValidationException, raise_compiler_error


def parse_cli_vars(var_string: str) -> Dict[str, Any]:
@@ -21,3 +27,49 @@ def parse_cli_vars(var_string: str) -> Dict[str, Any]:
    except ValidationException:
        fire_event(InvalidVarsYAML())
        raise


def get_project_config(
    project_path: str,
    profile_name: str,
    args: Namespace = Namespace(),
    cli_vars: Optional[Dict[str, Any]] = None,
    profile: Optional[Profile] = None,
    user_config: Optional[UserConfig] = None,
    return_dict: Boolean = True,
) -> Union[Project, Dict]:
    """Returns a project config (dict or object) from a given project path and profile name.

    Args:
        project_path: Path to project
        profile_name: Name of profile
        args: An argparse.Namespace that represents what would have been passed in on the
            command line (optional)
        cli_vars: A dict of any vars that would have been passed in on the command line (optional)
            (see parse_cli_vars above for formatting details)
        profile: A dbt.config.profile.Profile object (optional)
        user_config: A dbt.contracts.project.UserConfig object (optional)
        return_dict: Return a dict if true, return the full dbt.config.project.Project object if false

    Returns:
        A full project config

    """
    # Generate a profile if not provided
    if profile is None:
        # Generate user_config if not provided
        if user_config is None:
            user_config = read_user_config(flags.PROFILES_DIR)
        # Update flags
        flags.set_from_args(args, user_config)
        if cli_vars is None:
            cli_vars = {}
        profile = Profile.render_from_args(args, ProfileRenderer(cli_vars), profile_name)
    # Generate a project
    project = Project.from_project_root(
        project_path,
        DbtProjectYamlRenderer(profile),
        verify_version=bool(flags.VERSION_CHECK),
    )
    # Return
    return project.to_project_config() if return_dict else project

@@ -1 +1,51 @@
# Contexts and Jinja rendering

Contexts are used for Jinja rendering. They include context methods, executable macros, and various settings that are available in Jinja.

The most common entrypoint to Jinja rendering in dbt is a method named `get_rendered`, which takes two arguments: templated code (string), and a context used to render it (dictionary).

The context is the bundle of information that is in "scope" when rendering Jinja-templated code. For instance, imagine a simple Jinja template:
```
{% set new_value = some_macro(some_variable) %}
```
Both `some_macro()` and `some_variable` must be defined in that context. Otherwise, it will raise an error when rendering.
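
As a minimal sketch of that entrypoint (assuming a dev install of dbt-core; the template and context values below are invented):

```python
from dbt.clients.jinja import get_rendered

# Templated code (string) + context (dict) -> rendered string.
context = {"some_variable": "orders", "some_macro": lambda name: f"analytics.{name}"}
rendered = get_rendered("select * from {{ some_macro(some_variable) }}", context)
print(rendered)  # -> select * from analytics.orders
```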

Different contexts are used in different places because we allow access to different methods and data in different places. Executable SQL, for example, includes all available macros and the model being run. The variables and macros in scope for Jinja defined in yaml files are much more limited.

### Implementation

The context that is passed to Jinja is always in a dictionary format, not an actual class, so a `to_dict()` is executed on a context class before it is used for rendering.

Each context has a `generate_<name>_context` function to create the context. `ProviderContext` subclasses have different generate functions for parsing and for execution, so that certain functions (notably `ref`, `source`, and `config`) can return different results.

### Hierarchy

All contexts inherit from the `BaseContext`, which includes "pure" methods (e.g. `tojson`), `env_var()`, and `var()` (but only CLI values, passed via `--vars`).

Methods available in parent contexts are also available in child contexts.

```
BaseContext -- core/dbt/context/base.py
    SecretContext -- core/dbt/context/secret.py
    TargetContext -- core/dbt/context/target.py
        ConfiguredContext -- core/dbt/context/configured.py
            SchemaYamlContext -- core/dbt/context/configured.py
                DocsRuntimeContext -- core/dbt/context/configured.py
            MacroResolvingContext -- core/dbt/context/configured.py
            ManifestContext -- core/dbt/context/manifest.py
                QueryHeaderContext -- core/dbt/context/manifest.py
                ProviderContext -- core/dbt/context/provider.py
                    MacroContext -- core/dbt/context/provider.py
                    ModelContext -- core/dbt/context/provider.py
                    TestContext -- core/dbt/context/provider.py
```

### Contexts for configuration

Contexts for rendering "special" `.yml` (configuration) files:
- `SecretContext`: Supports "secret" env vars, which are prefixed with `DBT_ENV_SECRET_`. Used for rendering in `profiles.yml` and `packages.yml` ONLY. Secrets defined elsewhere will raise explicit errors.
- `TargetContext`: The same as `Base`, plus `target` (connection profile). Used most notably in `dbt_project.yml` and `selectors.yml`.

Contexts for other `.yml` files in the project:
- `SchemaYamlContext`: Supports `vars` declared on the CLI and in `dbt_project.yml`. Does not support custom macros, beyond `var()` + `env_var()` methods. Used for all `.yml` files, to define properties and configuration.
- `DocsRuntimeContext`: Standard `.yml` file context, plus `doc()` method (with all `docs` blocks in scope). Used to resolve `description` properties.

@@ -1,6 +1,6 @@
import json
import os
from typing import Any, Dict, NoReturn, Optional, Mapping
from typing import Any, Dict, NoReturn, Optional, Mapping, Iterable, Set

from dbt import flags
from dbt import tracking
@@ -8,8 +8,9 @@ from dbt.clients.jinja import get_rendered
from dbt.clients.yaml_helper import yaml, safe_load, SafeLoader, Loader, Dumper  # noqa: F401
from dbt.contracts.graph.compiled import CompiledResource
from dbt.exceptions import (
    raise_compiler_error,
    CompilationException,
    MacroReturn,
    raise_compiler_error,
    raise_parsing_error,
    disallow_secret_env_var,
)
@@ -23,39 +24,9 @@ from dbt.version import __version__ as dbt_version
import pytz
import datetime
import re
import itertools

# Contexts in dbt Core
# Contexts are used for Jinja rendering. They include context methods,
# executable macros, and various settings that are available in Jinja.
#
# Different contexts are used in different places because we allow access
# to different methods and data in different places. Executable SQL, for
# example, includes the available macros and the model, while Jinja in
# yaml files is more limited.
#
# The context that is passed to Jinja is always in a dictionary format,
# not an actual class, so a 'to_dict()' is executed on a context class
# before it is used for rendering.
#
# Each context has a generate_<name>_context function to create the context.
# ProviderContext subclasses have different generate functions for
# parsing and for execution.
#
# Context class hierarchy
#
# BaseContext -- core/dbt/context/base.py
#   SecretContext -- core/dbt/context/secret.py
#   TargetContext -- core/dbt/context/target.py
#     ConfiguredContext -- core/dbt/context/configured.py
#       SchemaYamlContext -- core/dbt/context/configured.py
#         DocsRuntimeContext -- core/dbt/context/configured.py
#       MacroResolvingContext -- core/dbt/context/configured.py
#       ManifestContext -- core/dbt/context/manifest.py
#         QueryHeaderContext -- core/dbt/context/manifest.py
#         ProviderContext -- core/dbt/context/provider.py
#           MacroContext -- core/dbt/context/provider.py
#           ModelContext -- core/dbt/context/provider.py
#           TestContext -- core/dbt/context/provider.py
# See the `contexts` module README for more information on how contexts work


def get_pytz_module_context() -> Dict[str, Any]:
@@ -77,11 +48,35 @@ def get_re_module_context() -> Dict[str, Any]:
    return {name: getattr(re, name) for name in context_exports}


def get_itertools_module_context() -> Dict[str, Any]:
    # Excluded dropwhile, filterfalse, takewhile and groupby;
    # first 3 illogical for Jinja and last redundant.
    context_exports = [
        "count",
        "cycle",
        "repeat",
        "accumulate",
        "chain",
        "compress",
        "islice",
        "starmap",
        "tee",
        "zip_longest",
        "product",
        "permutations",
        "combinations",
        "combinations_with_replacement",
    ]

    return {name: getattr(itertools, name) for name in context_exports}


def get_context_modules() -> Dict[str, Dict[str, Any]]:
    return {
        "pytz": get_pytz_module_context(),
        "datetime": get_datetime_module_context(),
        "re": get_re_module_context(),
        "itertools": get_itertools_module_context(),
    }


@@ -457,6 +452,94 @@ class BaseContext(metaclass=ContextMeta):
        except (ValueError, yaml.YAMLError):
            return default

    @contextmember("set")
    @staticmethod
    def _set(value: Iterable[Any], default: Any = None) -> Optional[Set[Any]]:
        """The `set` context method can be used to convert any iterable
        to a sequence of iterable elements that are unique (a set).

        :param value: The iterable
        :param default: A default value to return if the `value` argument
            is not an iterable

        Usage:
            {% set my_list = [1, 2, 2, 3] %}
            {% set my_set = set(my_list) %}
            {% do log(my_set) %}  {# {1, 2, 3} #}
        """
        try:
            return set(value)
        except TypeError:
            return default

    @contextmember
    @staticmethod
    def try_set(value: Iterable[Any]) -> Set[Any]:
        """The `try_set` context method can be used to convert any iterable
        to a sequence of iterable elements that are unique (a set). The
        difference to the `set` context method is that the `try_set` method
        will raise an exception on a TypeError.

        :param value: The iterable

        Usage:
            {% set my_list = [1, 2, 2, 3] %}
            {% set my_set = try_set(my_list) %}
            {% do log(my_set) %}  {# {1, 2, 3} #}
        """
        try:
            return set(value)
        except TypeError as e:
            raise CompilationException(e)

    @contextmember("zip")
    @staticmethod
    def _zip(*args: Iterable[Any], default: Any = None) -> Optional[Iterable[Any]]:
        """The `zip` context method can be used to return
        an iterator of tuples, where the i-th tuple contains the i-th
        element from each of the argument iterables.

        :param *args: Any number of iterables
        :param default: A default value to return if `*args` is not
            iterable

        Usage:
            {% set my_list_a = [1, 2] %}
            {% set my_list_b = ['alice', 'bob'] %}
            {% set my_zip = zip(my_list_a, my_list_b) | list %}
            {% do log(my_zip) %}  {# [(1, 'alice'), (2, 'bob')] #}
        """
        try:
            return zip(*args)
        except TypeError:
            return default

    @contextmember
    @staticmethod
    def try_zip(*args: Iterable[Any]) -> Iterable[Any]:
        """The `try_zip` context method can be used to return
        an iterator of tuples, where the i-th tuple contains the i-th
        element from each of the argument iterables. The difference to the
        `zip` context method is that the `try_zip` method will raise an
        exception on a TypeError.

        :param *args: Any number of iterables

        Usage:
            {% set my_list_a = [1, 2] %}
            {% set my_list_b = ['alice', 'bob'] %}
            {% set my_zip = try_zip(my_list_a, my_list_b) | list %}
            {% do log(my_zip) %}  {# [(1, 'alice'), (2, 'bob')] #}
        """
        try:
            return zip(*args)
        except TypeError as e:
            raise CompilationException(e)

    @contextmember
    @staticmethod
    def log(msg: str, info: bool = False) -> str:
@@ -569,7 +652,9 @@ class BaseContext(metaclass=ContextMeta):
            {{ print("Running some_macro: " ~ arg1 ~ ", " ~ arg2) }}
        {% endmacro %}"
        """
        print(msg)

        if not flags.NO_PRINT:
            print(msg)
        return ""

@@ -4,7 +4,7 @@ from dataclasses import dataclass
from typing import List, Iterator, Dict, Any, TypeVar, Generic

from dbt.config import RuntimeConfig, Project, IsFQNResource
from dbt.contracts.graph.model_config import BaseConfig, get_config_for
from dbt.contracts.graph.model_config import BaseConfig, get_config_for, _listify
from dbt.exceptions import InternalException
from dbt.node_types import NodeType
from dbt.utils import fqn_search
@@ -264,18 +264,49 @@ class ContextConfig:

    @classmethod
    def _add_config_call(cls, config_call_dict, opts: Dict[str, Any]) -> None:
        # config_call_dict is already encountered configs, opts is new
        # This mirrors code in _merge_field_value in model_config.py which is similar but
        # operates on config objects.
        for k, v in opts.items():
            # MergeBehavior for post-hook and pre-hook is to collect all
            # values, instead of overwriting
            if k in BaseConfig.mergebehavior["append"]:
                if not isinstance(v, list):
                    v = [v]
            if k in BaseConfig.mergebehavior["update"] and not isinstance(v, dict):
                raise InternalException(f"expected dict, got {v}")
            if k in config_call_dict and isinstance(config_call_dict[k], list):
                config_call_dict[k].extend(v)
            elif k in config_call_dict and isinstance(config_call_dict[k], dict):
                config_call_dict[k].update(v)
                if k in config_call_dict:  # should always be a list here
                    config_call_dict[k].extend(v)
                else:
                    config_call_dict[k] = v

            elif k in BaseConfig.mergebehavior["update"]:
                if not isinstance(v, dict):
                    raise InternalException(f"expected dict, got {v}")
                if k in config_call_dict and isinstance(config_call_dict[k], dict):
                    config_call_dict[k].update(v)
                else:
                    config_call_dict[k] = v
            elif k in BaseConfig.mergebehavior["dict_key_append"]:
                if not isinstance(v, dict):
                    raise InternalException(f"expected dict, got {v}")
                if k in config_call_dict:  # should always be a dict
                    for key, value in v.items():
                        extend = False
                        # This might start with a +, to indicate we should extend the list
                        # instead of just clobbering it
                        if key.startswith("+"):
                            extend = True
                        if key in config_call_dict[k] and extend:
                            # extend the list
                            config_call_dict[k][key].extend(_listify(value))
                        else:
                            # clobber the list
                            config_call_dict[k][key] = _listify(value)
                else:
                    # This is always a dictionary
                    config_call_dict[k] = v
                    # listify everything
                    for key, value in config_call_dict[k].items():
                        config_call_dict[k][key] = _listify(value)
            else:
                config_call_dict[k] = v
|
||||
|
||||
|
||||
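To make the three merge behaviors concrete, here is a hedged standalone sketch (a simplified helper, not dbt's real ContextConfig) showing how two successive config() calls would combine; the role names are invented:

# Illustrative simplification of the merge rules above.
def merge_call(acc, opts, append=("tags",), update=("meta",), dict_key_append=("grants",)):
    for k, v in opts.items():
        if k in append:                       # collect values in a list
            v = v if isinstance(v, list) else [v]
            acc.setdefault(k, []).extend(v)
        elif k in update:                     # shallow dict update
            acc.setdefault(k, {}).update(v)
        elif k in dict_key_append:            # per-key list append/clobber
            tgt = acc.setdefault(k, {})
            for key, val in v.items():
                val = val if isinstance(val, list) else [val]
                if key.startswith("+"):       # "+" means extend the existing list
                    tgt.setdefault(key[1:], []).extend(val)
                else:                         # otherwise clobber it
                    tgt[key] = val
        else:                                 # default: clobber
            acc[k] = v
    return acc

acc = merge_call({}, {"tags": "nightly", "grants": {"select": ["role_a"]}})
acc = merge_call(acc, {"tags": ["hourly"], "grants": {"+select": ["role_b"]}})
print(acc)  # {'tags': ['nightly', 'hourly'], 'grants': {'select': ['role_a', 'role_b']}}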
@@ -62,6 +62,8 @@ from dbt.node_types import NodeType

from dbt.utils import merge, AttrDict, MultiDict

from dbt import selected_resources

import agate

@@ -1143,6 +1145,15 @@ class ProviderContext(ManifestContext):
            msg = f"Env var required but not provided: '{var}'"
            raise_parsing_error(msg)

    @contextproperty
    def selected_resources(self) -> List[str]:
        """The `selected_resources` variable contains a list of the resources
        selected based on the parameters provided to the dbt command.
        Currently, it is not populated for the `run-operation` command, which
        doesn't support `--select`.
        """
        return selected_resources.SELECTED_RESOURCES


class MacroContext(ProviderContext):
    """Internally, macros can be executed like nodes, with some restrictions:

@@ -7,6 +7,9 @@ from dbt.exceptions import raise_parsing_error
from dbt.logger import SECRET_ENV_PREFIX


SECRET_PLACEHOLDER = "$$$DBT_SECRET_START$$${}$$$DBT_SECRET_END$$$"


class SecretContext(BaseContext):
    """This context is used in profiles.yml + packages.yml. It can render secret
    env vars that aren't usable elsewhere"""
@@ -18,21 +21,29 @@ class SecretContext(BaseContext):

        If the default is None, raise an exception for an undefined variable.

        In this context *only*, env_var will return the actual values of
        env vars prefixed with DBT_ENV_SECRET_
        In this context *only*, env_var will accept env vars prefixed with DBT_ENV_SECRET_.
        It will return the name of the secret env var, wrapped in 'start' and 'end' identifiers.
        The actual value will be subbed in later in SecretRenderer.render_value()
        """
        return_value = None
        if var in os.environ:

        # if this is a 'secret' env var, just return the name of the env var
        # instead of rendering the actual value here, to avoid any risk of
        # Jinja manipulation. it will be subbed out later, in SecretRenderer.render_value
        if var in os.environ and var.startswith(SECRET_ENV_PREFIX):
            return SECRET_PLACEHOLDER.format(var)

        elif var in os.environ:
            return_value = os.environ[var]
        elif default is not None:
            return_value = default

        if return_value is not None:
            # do not save secret environment variables
            # store env vars in the internal manifest to power partial parsing
            # if it's a 'secret' env var, we shouldn't even get here,
            # but just to be safe, don't save secrets
            if not var.startswith(SECRET_ENV_PREFIX):
                self.env_vars[var] = return_value

            # return the value even if it's a secret
            return return_value
        else:
            msg = f"Env var required but not provided: '{var}'"
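A hedged round-trip sketch of the placeholder mechanism described above (the helpers below are standalone stand-ins; in dbt the substitution happens in SecretRenderer.render_value):

# Illustrative sketch only; simplified stand-ins for dbt internals.
import os

SECRET_ENV_PREFIX = "DBT_ENV_SECRET_"
SECRET_PLACEHOLDER = "$$$DBT_SECRET_START$$${}$$$DBT_SECRET_END$$$"

os.environ["DBT_ENV_SECRET_GIT_TOKEN"] = "hunter2"

def env_var(var: str) -> str:
    if var.startswith(SECRET_ENV_PREFIX):
        return SECRET_PLACEHOLDER.format(var)   # never hand the raw value to Jinja
    return os.environ[var]

def render_value(rendered: str) -> str:         # ~ what SecretRenderer.render_value does
    for var in [v for v in os.environ if v.startswith(SECRET_ENV_PREFIX)]:
        rendered = rendered.replace(SECRET_PLACEHOLDER.format(var), os.environ[var])
    return rendered

url = "https://user:" + env_var("DBT_ENV_SECRET_GIT_TOKEN") + "@github.com/org/repo.git"
print(render_value(url))  # -> https://user:hunter2@github.com/org/repo.git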
@@ -1091,7 +1091,7 @@ AnyManifest = Union[Manifest, MacroManifest]


@dataclass
@schema_version("manifest", 4)
@schema_version("manifest", 5)
class WritableManifest(ArtifactMixin):
    nodes: Mapping[UniqueID, ManifestNode] = field(
        metadata=dict(description=("The nodes defined in the dbt project and its dependencies"))
@@ -1135,6 +1135,12 @@ class WritableManifest(ArtifactMixin):
        )
    )

    def __post_serialize__(self, dct):
        for unique_id, node in dct["nodes"].items():
            if "config_call_dict" in node:
                del node["config_call_dict"]
        return dct


def _check_duplicates(value: HasUniqueID, src: Mapping[str, HasUniqueID]):
    if value.unique_id in src:

@@ -66,6 +66,7 @@ class MergeBehavior(Metadata):
    Append = 1
    Update = 2
    Clobber = 3
    DictKeyAppend = 4

    @classmethod
    def default_field(cls) -> "MergeBehavior":
@@ -124,6 +125,9 @@ def _listify(value: Any) -> List:
    return [value]


# There are two versions of this code. The one here is for config
# objects, the one in _add_config_call in context_config.py is for
# config_call_dict dictionaries.
def _merge_field_value(
    merge_behavior: MergeBehavior,
    self_value: Any,
@@ -141,6 +145,31 @@ def _merge_field_value(
        value = self_value.copy()
        value.update(other_value)
        return value
    elif merge_behavior == MergeBehavior.DictKeyAppend:
        if not isinstance(self_value, dict):
            raise InternalException(f"expected dict, got {self_value}")
        if not isinstance(other_value, dict):
            raise InternalException(f"expected dict, got {other_value}")
        new_dict = {}
        for key in self_value.keys():
            new_dict[key] = _listify(self_value[key])
        for key in other_value.keys():
            extend = False
            new_key = key
            # This might start with a +, to indicate we should extend the list
            # instead of just clobbering it
            if new_key.startswith("+"):
                new_key = key.lstrip("+")
                extend = True
            if new_key in new_dict and extend:
                # extend the list
                value = other_value[key]
                new_dict[new_key].extend(_listify(value))
            else:
                # clobber the list
                new_dict[new_key] = _listify(other_value[key])
        return new_dict

    else:
        raise InternalException(f"Got an invalid merge_behavior: {merge_behavior}")

@@ -257,6 +286,7 @@ class BaseConfig(AdditionalPropertiesAllowed, Replaceable):
    mergebehavior = {
        "append": ["pre-hook", "pre_hook", "post-hook", "post_hook", "tags"],
        "update": ["quoting", "column_types", "meta"],
        "dict_key_append": ["grants"],
    }

    @classmethod
@@ -335,6 +365,40 @@ class BaseConfig(AdditionalPropertiesAllowed, Replaceable):
@dataclass
class SourceConfig(BaseConfig):
    enabled: bool = True
    # to be implemented to complete CT-201
    # quoting: Dict[str, Any] = field(
    #     default_factory=dict,
    #     metadata=MergeBehavior.Update.meta(),
    # )
    # freshness: Optional[Dict[str, Any]] = field(
    #     default=None,
    #     metadata=CompareBehavior.Exclude.meta(),
    # )
    # loader: Optional[str] = field(
    #     default=None,
    #     metadata=CompareBehavior.Exclude.meta(),
    # )
    # # TODO what type is this? docs say: "<column_name_or_expression>"
    # loaded_at_field: Optional[str] = field(
    #     default=None,
    #     metadata=CompareBehavior.Exclude.meta(),
    # )
    # database: Optional[str] = field(
    #     default=None,
    #     metadata=CompareBehavior.Exclude.meta(),
    # )
    # schema: Optional[str] = field(
    #     default=None,
    #     metadata=CompareBehavior.Exclude.meta(),
    # )
    # meta: Dict[str, Any] = field(
    #     default_factory=dict,
    #     metadata=MergeBehavior.Update.meta(),
    # )
    # tags: Union[List[str], str] = field(
    #     default_factory=list_str,
    #     metadata=metas(ShowBehavior.Hide, MergeBehavior.Append, CompareBehavior.Exclude),
    # )

@dataclass
@@ -393,6 +457,9 @@ class NodeConfig(NodeAndTestConfig):
    # sometimes getting the Union order wrong, causing serialization failures.
    unique_key: Union[str, List[str], None] = None
    on_schema_change: Optional[str] = "ignore"
    grants: Dict[str, Any] = field(
        default_factory=dict, metadata=MergeBehavior.DictKeyAppend.meta()
    )

    @classmethod
    def __pre_deserialize__(cls, data):

@@ -233,8 +233,6 @@ class ParsedNode(ParsedNodeDefaults, ParsedNodeMixins, SerializableType):
        return self.to_dict()

    def __post_serialize__(self, dct):
        if "config_call_dict" in dct:
            del dct["config_call_dict"]
        if "_event_status" in dct:
            del dct["_event_status"]
        return dct
@@ -586,10 +584,7 @@ class UnpatchedSourceDefinition(UnparsedBaseNode, HasUniqueID, HasFqn):

    @property
    def columns(self) -> Sequence[UnparsedColumn]:
        if self.table.columns is None:
            return []
        else:
            return self.table.columns
        return [] if self.table.columns is None else self.table.columns

    def get_tests(self) -> Iterator[Tuple[Dict[str, Any], Optional[UnparsedColumn]]]:
        for test in self.tests:

@@ -7,7 +7,7 @@ from dbt.contracts.util import (

# trigger the PathEncoder
import dbt.helper_types  # noqa:F401
from dbt.exceptions import CompilationException
from dbt.exceptions import CompilationException, ParsingException

from dbt.dataclass_schema import dbtClassMixin, StrEnum, ExtensibleDbtClassMixin

@@ -242,6 +242,7 @@ class Quoting(dbtClassMixin, Mergeable):

@dataclass
class UnparsedSourceTableDefinition(HasColumnTests, HasTests):
    config: Dict[str, Any] = field(default_factory=dict)
    loaded_at_field: Optional[str] = None
    identifier: Optional[str] = None
    quoting: Quoting = field(default_factory=Quoting)
@@ -322,6 +323,7 @@ class SourcePatch(dbtClassMixin, Replaceable):
    path: Path = field(
        metadata=dict(description="The path to the patch-defining yml file"),
    )
    config: Dict[str, Any] = field(default_factory=dict)
    description: Optional[str] = None
    meta: Optional[Dict[str, Any]] = None
    database: Optional[str] = None
@@ -458,3 +460,9 @@ class UnparsedMetric(dbtClassMixin, Replaceable):
    filters: List[MetricFilter] = field(default_factory=list)
    meta: Dict[str, Any] = field(default_factory=dict)
    tags: List[str] = field(default_factory=list)

    @classmethod
    def validate(cls, data):
        super(UnparsedMetric, cls).validate(data)
        if "name" in data and " " in data["name"]:
            raise ParsingException(f"Metrics name '{data['name']}' cannot contain spaces")
@@ -253,6 +253,7 @@ class UserConfig(ExtensibleDbtClassMixin, Replaceable, UserConfigContract):
    use_experimental_parser: Optional[bool] = None
    static_parser: Optional[bool] = None
    indirect_selection: Optional[str] = None
    cache_selected_only: Optional[bool] = None


@dataclass

@@ -1,20 +1,23 @@
from pathlib import Path
from .graph.manifest import WritableManifest
from .results import RunResultsArtifact
from .results import FreshnessExecutionResultArtifact
from typing import Optional
from dbt.exceptions import IncompatibleSchemaException


class PreviousState:
    def __init__(self, path: Path):
    def __init__(self, path: Path, current_path: Path):
        self.path: Path = path
        self.current_path: Path = current_path
        self.manifest: Optional[WritableManifest] = None
        self.results: Optional[RunResultsArtifact] = None
        self.sources: Optional[FreshnessExecutionResultArtifact] = None
        self.sources_current: Optional[FreshnessExecutionResultArtifact] = None

        manifest_path = self.path / "manifest.json"
        if manifest_path.exists() and manifest_path.is_file():
            try:
                # we want to bail with an error if schema versions don't match
                self.manifest = WritableManifest.read_and_check_versions(str(manifest_path))
            except IncompatibleSchemaException as exc:
                exc.add_filename(str(manifest_path))
@@ -23,8 +26,27 @@ class PreviousState:
        results_path = self.path / "run_results.json"
        if results_path.exists() and results_path.is_file():
            try:
                # we want to bail with an error if schema versions don't match
                self.results = RunResultsArtifact.read_and_check_versions(str(results_path))
            except IncompatibleSchemaException as exc:
                exc.add_filename(str(results_path))
                raise

        sources_path = self.path / "sources.json"
        if sources_path.exists() and sources_path.is_file():
            try:
                self.sources = FreshnessExecutionResultArtifact.read_and_check_versions(
                    str(sources_path)
                )
            except IncompatibleSchemaException as exc:
                exc.add_filename(str(sources_path))
                raise

        sources_current_path = self.current_path / "sources.json"
        if sources_current_path.exists() and sources_current_path.is_file():
            try:
                self.sources_current = FreshnessExecutionResultArtifact.read_and_check_versions(
                    str(sources_current_path)
                )
            except IncompatibleSchemaException as exc:
                exc.add_filename(str(sources_current_path))
                raise

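A hedged usage sketch: the new current_path argument lets PreviousState load a freshness artifact from the current invocation's target directory alongside the one from the comparison state. The paths below are invented for illustration:

# Illustrative only; paths are hypothetical.
from pathlib import Path

prev = PreviousState(path=Path("prod-artifacts"), current_path=Path("target"))
if prev.sources is not None and prev.sources_current is not None:
    # with both freshness artifacts loaded, state comparison can power
    # selection such as `--select source_status:fresher+`
    print("both freshness artifacts available for comparison")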
@@ -64,7 +64,7 @@ class Event(metaclass=ABCMeta):

    # in theory threads can change so we don't cache them.
    def get_thread_name(self) -> str:
        return threading.current_thread().getName()
        return threading.current_thread().name

    @classmethod
    def get_invocation_id(cls) -> str:

@@ -15,7 +15,7 @@ def format_fancy_output_line(
        progress = ""
    else:
        progress = "{} of {} ".format(index, total)
    prefix = "{progress}{message}".format(progress=progress, message=msg)
    prefix = "{progress}{message} ".format(progress=progress, message=msg)

    truncate_width = ui.printer_width() - 3
    justified = prefix.ljust(ui.printer_width(), ".")

@@ -1,4 +1,3 @@
import colorama
from colorama import Style
import dbt.events.functions as this  # don't worry I hate it too.
from dbt.events.base_types import NoStdOut, Event, NoFile, ShowException, Cache
@@ -50,14 +49,6 @@ format_color = True
format_json = False
invocation_id: Optional[str] = None

# Colorama needs some help on windows because we're using logger.info
# instead of print(). If the Windows env doesn't have a TERM var set,
# then we should override the logging stream to use the colorama
# converter. If the TERM var is set (as with Git Bash), then it's safe
# to send escape characters and no log handler injection is needed.
if sys.platform == "win32":
    colorama.init(wrap=False)


def setup_event_logger(log_path, level_override=None):
    # flags have been resolved, and log_path is known

@@ -194,8 +185,8 @@ def create_debug_text_log_line(e: T_Event) -> str:
    scrubbed_msg: str = scrub_secrets(e.message(), env_secrets())
    level: str = e.level_tag() if len(e.level_tag()) == 5 else f"{e.level_tag()} "
    thread = ""
    if threading.current_thread().getName():
        thread_name = threading.current_thread().getName()
    if threading.current_thread().name:
        thread_name = threading.current_thread().name
        thread_name = thread_name[:10]
        thread_name = thread_name.ljust(10, " ")
        thread = f" [{thread_name}]:"

@@ -291,6 +291,25 @@ class GitProgressCheckedOutAt(DebugLevel):
        return f"  Checked out at {self.end_sha}."


@dataclass
class RegistryIndexProgressMakingGETRequest(DebugLevel):
    url: str
    code: str = "M022"

    def message(self) -> str:
        return f"Making package index registry request: GET {self.url}"


@dataclass
class RegistryIndexProgressGETResponse(DebugLevel):
    url: str
    resp_code: int
    code: str = "M023"

    def message(self) -> str:
        return f"Response from registry index: GET {self.url} {self.resp_code}"


@dataclass
class RegistryProgressMakingGETRequest(DebugLevel):
    url: str
@@ -310,6 +329,45 @@ class RegistryProgressGETResponse(DebugLevel):
        return f"Response from registry: GET {self.url} {self.resp_code}"


@dataclass
class RegistryResponseUnexpectedType(DebugLevel):
    response: str
    code: str = "M024"

    def message(self) -> str:
        return f"Response was None: {self.response}"


@dataclass
class RegistryResponseMissingTopKeys(DebugLevel):
    response: str
    code: str = "M025"

    def message(self) -> str:
        # expected/actual keys logged in exception
        return f"Response missing top level keys: {self.response}"


@dataclass
class RegistryResponseMissingNestedKeys(DebugLevel):
    response: str
    code: str = "M026"

    def message(self) -> str:
        # expected/actual keys logged in exception
        return f"Response missing nested keys: {self.response}"


@dataclass
class RegistryResponseExtraNestedKeys(DebugLevel):
    response: str
    code: str = "M027"

    def message(self) -> str:
        # expected/actual keys logged in exception
        return f"Response contained inconsistent keys: {self.response}"


# TODO this was actually `logger.exception(...)` not `logger.error(...)`
@dataclass
class SystemErrorRetrievingModTime(ErrorLevel):

@@ -2294,11 +2352,15 @@ class WritingInjectedSQLForNode(DebugLevel):


@dataclass
class DisableTracking(WarnLevel):
class DisableTracking(DebugLevel):
    code: str = "Z039"

    def message(self) -> str:
        return "Error sending message, disabling tracking"
        return (
            "Error sending anonymous usage statistics. Disabling tracking for this execution. "
            "If you wish to permanently disable tracking, see: "
            "https://docs.getdbt.com/reference/global-configs#send-anonymous-usage-stats."
        )


@dataclass
@@ -2346,7 +2408,7 @@ class TrackingInitializeFailure(ShowException, DebugLevel):
class RetryExternalCall(DebugLevel):
    attempt: int
    max: int
    code: str = "Z045"
    code: str = "M020"

    def message(self) -> str:
        return f"Retrying external call. Attempt: {self.attempt} Max attempts: {self.max}"
@@ -2359,9 +2421,7 @@ class GeneralWarningMsg(WarnLevel):
    code: str = "Z046"

    def message(self) -> str:
        if self.log_fmt is not None:
            return self.log_fmt.format(self.msg)
        return self.msg
        return self.log_fmt.format(self.msg) if self.log_fmt is not None else self.msg


@dataclass
@@ -2371,9 +2431,7 @@ class GeneralWarningException(WarnLevel):
    code: str = "Z047"

    def message(self) -> str:
        if self.log_fmt is not None:
            return self.log_fmt.format(str(self.exc))
        return str(self.exc)
        return self.log_fmt.format(str(self.exc)) if self.log_fmt is not None else str(self.exc)


@dataclass
@@ -2381,7 +2439,19 @@ class EventBufferFull(WarnLevel):
    code: str = "Z048"

    def message(self) -> str:
        return "Internal event buffer full. Earliest events will be dropped (FIFO)."
        return (
            "Internal logging/event buffer full. "
            "Earliest logs/events will be dropped as new ones are fired (FIFO)."
        )


@dataclass
class RecordRetryException(DebugLevel):
    exc: Exception
    code: str = "M021"

    def message(self) -> str:
        return f"External call exception: {self.exc}"


# since mypy doesn't run on every file we need to suggest to mypy that every
@@ -2413,6 +2483,14 @@ if 1 == 0:
    GitNothingToDo(sha="")
    GitProgressUpdatedCheckoutRange(start_sha="", end_sha="")
    GitProgressCheckedOutAt(end_sha="")
    RegistryIndexProgressMakingGETRequest(url="")
    RegistryIndexProgressGETResponse(url="", resp_code=1234)
    RegistryProgressMakingGETRequest(url="")
    RegistryProgressGETResponse(url="", resp_code=1234)
    RegistryResponseUnexpectedType(response="")
    RegistryResponseMissingTopKeys(response="")
    RegistryResponseMissingNestedKeys(response="")
    RegistryResponseExtraNestedKeys(response="")
    SystemErrorRetrievingModTime(path="")
    SystemCouldNotWrite(path="", reason="", exc=Exception(""))
    SystemExecutingCmd(cmd=[""])
@@ -2737,3 +2815,4 @@ if 1 == 0:
    GeneralWarningMsg(msg="", log_fmt="")
    GeneralWarningException(exc=Exception(""), log_fmt="")
    EventBufferFull()
    RecordRetryException(exc=Exception(""))

@@ -383,10 +383,11 @@ class FailedToConnectException(DatabaseException):

class CommandError(RuntimeException):
    def __init__(self, cwd, cmd, message="Error running command"):
        cmd_scrubbed = list(scrub_secrets(cmd_txt, env_secrets()) for cmd_txt in cmd)
        super().__init__(message)
        self.cwd = cwd
        self.cmd = cmd
        self.args = (cwd, cmd, message)
        self.cmd = cmd_scrubbed
        self.args = (cwd, cmd_scrubbed, message)

    def __str__(self):
        if len(self.cmd) == 0:
@@ -411,9 +412,9 @@ class CommandResultError(CommandError):
    def __init__(self, cwd, cmd, returncode, stdout, stderr, message="Got a non-zero returncode"):
        super().__init__(cwd, cmd, message)
        self.returncode = returncode
        self.stdout = stdout
        self.stderr = stderr
        self.args = (cwd, cmd, returncode, stdout, stderr, message)
        self.stdout = scrub_secrets(stdout.decode("utf-8"), env_secrets())
        self.stderr = scrub_secrets(stderr.decode("utf-8"), env_secrets())
        self.args = (cwd, self.cmd, returncode, self.stdout, self.stderr, message)

    def __str__(self):
        return "{} running: {}".format(self.msg, self.cmd)
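For intuition, a hedged stand-in for the scrubbing helpers used above (the real scrub_secrets/env_secrets live in dbt; this sketch only mirrors their contract):

# Illustrative stand-ins, not dbt's implementations.
import os

SECRET_ENV_PREFIX = "DBT_ENV_SECRET_"

def env_secrets():
    # every value of a DBT_ENV_SECRET_* env var counts as a secret
    return [v for k, v in os.environ.items() if k.startswith(SECRET_ENV_PREFIX) and v]

def scrub_secrets(txt: str, secrets) -> str:
    for secret in secrets:
        txt = txt.replace(secret, "*****")
    return txt

os.environ["DBT_ENV_SECRET_TOKEN"] = "hunter2"
print(scrub_secrets("git clone https://x:hunter2@host/repo.git", env_secrets()))
# -> git clone https://x:*****@host/repo.git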
@@ -436,6 +437,10 @@ class InvalidSelectorException(RuntimeException):
        super().__init__(name)


class DuplicateYamlKeyException(CompilationException):
    pass


def raise_compiler_error(msg, node=None) -> NoReturn:
    raise CompilationException(msg, node)

@@ -704,7 +709,6 @@ def missing_materialization(model, adapter_type):

def bad_package_spec(repo, spec, error_message):
    msg = "Error checking out spec='{}' for repo {}\n{}".format(spec, repo, error_message)

    raise InternalException(scrub_secrets(msg, env_secrets()))


@@ -838,31 +842,47 @@ def raise_duplicate_macro_name(node_1, node_2, namespace) -> NoReturn:

def raise_duplicate_resource_name(node_1, node_2):
    duped_name = node_1.name
    node_type = NodeType(node_1.resource_type)
    pluralized = (
        node_type.pluralize()
        if node_1.resource_type == node_2.resource_type
        else "resources"  # still raise if ref() collision, e.g. model + seed
    )

    if node_1.resource_type in NodeType.refable():
        get_func = 'ref("{}")'.format(duped_name)
    elif node_1.resource_type == NodeType.Source:
    action = "looking for"
    # duplicate 'ref' targets
    if node_type in NodeType.refable():
        formatted_name = f'ref("{duped_name}")'
    # duplicate sources
    elif node_type == NodeType.Source:
        duped_name = node_1.get_full_source_name()
        get_func = node_1.get_source_representation()
    elif node_1.resource_type == NodeType.Documentation:
        get_func = 'doc("{}")'.format(duped_name)
    elif node_1.resource_type == NodeType.Test and "schema" in node_1.tags:
        return
        formatted_name = node_1.get_source_representation()
    # duplicate docs blocks
    elif node_type == NodeType.Documentation:
        formatted_name = f'doc("{duped_name}")'
    # duplicate generic tests
    elif node_type == NodeType.Test and hasattr(node_1, "test_metadata"):
        column_name = f'column "{node_1.column_name}" in ' if node_1.column_name else ""
        model_name = node_1.file_key_name
        duped_name = f'{node_1.name}" defined on {column_name}"{model_name}'
        action = "running"
        formatted_name = "tests"
    # all other resource types
    else:
        get_func = '"{}"'.format(duped_name)
        formatted_name = duped_name

    # should this be raise_parsing_error instead?
    raise_compiler_error(
        'dbt found two resources with the name "{}". Since these resources '
        "have the same name,\ndbt will be unable to find the correct resource "
        "when {} is used. To fix this,\nchange the name of one of "
        "these resources:\n- {} ({})\n- {} ({})".format(
            duped_name,
            get_func,
            node_1.unique_id,
            node_1.original_file_path,
            node_2.unique_id,
            node_2.original_file_path,
        )
        f"""
dbt found two {pluralized} with the name "{duped_name}".

Since these resources have the same name, dbt will be unable to find the correct resource
when {action} {formatted_name}.

To fix this, change the name of one of these resources:
- {node_1.unique_id} ({node_1.original_file_path})
- {node_2.unique_id} ({node_2.original_file_path})
""".strip()
    )


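For a sense of the new wording, here is roughly how the message would render for two models that share a name (the unique_ids and file paths below are invented for illustration):

# dbt found two models with the name "customers".
#
# Since these resources have the same name, dbt will be unable to find the correct resource
# when looking for ref("customers").
#
# To fix this, change the name of one of these resources:
# - model.my_project.customers (models/staging/customers.sql)
# - model.my_project.customers (models/marts/customers.sql)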
@@ -887,7 +907,8 @@ def raise_ambiguous_alias(node_1, node_2, duped_name=None):
def raise_ambiguous_catalog_match(unique_id, match_1, match_2):
    def get_match_string(match):
        return "{}.{}".format(
            match.get("metadata", {}).get("schema"), match.get("metadata", {}).get("name")
            match.get("metadata", {}).get("schema"),
            match.get("metadata", {}).get("name"),
        )

    raise_compiler_error(
@@ -966,11 +987,11 @@ def raise_duplicate_source_patch_name(patch_1, patch_2):
    )


def raise_invalid_schema_yml_version(path, issue):
def raise_invalid_property_yml_version(path, issue):
    raise_compiler_error(
        "The schema file at {} is invalid because {}. Please consult the "
        "documentation for more information on schema.yml syntax:\n\n"
        "https://docs.getdbt.com/docs/schemayml-files".format(path, issue)
        "The yml property file at {} is invalid because {}. Please consult the "
        "documentation for more information on yml property file syntax:\n\n"
        "https://docs.getdbt.com/reference/configs-and-properties".format(path, issue)
    )


@@ -1048,7 +1069,7 @@ CONTEXT_EXPORTS = {
    raise_dependency_error,
    raise_duplicate_patch_name,
    raise_duplicate_resource_name,
    raise_invalid_schema_yml_version,
    raise_invalid_property_yml_version,
    raise_not_implemented,
    relation_wrong_type,
]

@@ -35,6 +35,18 @@ INDIRECT_SELECTION = None
LOG_CACHE_EVENTS = None
EVENT_BUFFER_SIZE = 100000
QUIET = None
NO_PRINT = None
CACHE_SELECTED_ONLY = None

_NON_BOOLEAN_FLAGS = [
    "LOG_FORMAT",
    "PRINTER_WIDTH",
    "PROFILES_DIR",
    "INDIRECT_SELECTION",
    "EVENT_BUFFER_SIZE",
]

_NON_DBT_ENV_FLAGS = ["DO_NOT_TRACK"]

# Global CLI defaults. These flags are set from three places:
# CLI args, environment variables, and user_config (profiles.yml).
@@ -57,6 +69,8 @@ flag_defaults = {
    "LOG_CACHE_EVENTS": False,
    "EVENT_BUFFER_SIZE": 100000,
    "QUIET": False,
    "NO_PRINT": False,
    "CACHE_SELECTED_ONLY": False,
}

@@ -106,7 +120,7 @@ def set_from_args(args, user_config):
    global STRICT_MODE, FULL_REFRESH, WARN_ERROR, USE_EXPERIMENTAL_PARSER, STATIC_PARSER
    global WRITE_JSON, PARTIAL_PARSE, USE_COLORS, STORE_FAILURES, PROFILES_DIR, DEBUG, LOG_FORMAT
    global INDIRECT_SELECTION, VERSION_CHECK, FAIL_FAST, SEND_ANONYMOUS_USAGE_STATS
    global PRINTER_WIDTH, WHICH, LOG_CACHE_EVENTS, EVENT_BUFFER_SIZE, QUIET
    global PRINTER_WIDTH, WHICH, LOG_CACHE_EVENTS, EVENT_BUFFER_SIZE, QUIET, NO_PRINT, CACHE_SELECTED_ONLY

    STRICT_MODE = False  # backwards compatibility
    # cli args without user_config or env var option
@@ -132,32 +146,25 @@ def set_from_args(args, user_config):
    LOG_CACHE_EVENTS = get_flag_value("LOG_CACHE_EVENTS", args, user_config)
    EVENT_BUFFER_SIZE = get_flag_value("EVENT_BUFFER_SIZE", args, user_config)
    QUIET = get_flag_value("QUIET", args, user_config)
    NO_PRINT = get_flag_value("NO_PRINT", args, user_config)
    CACHE_SELECTED_ONLY = get_flag_value("CACHE_SELECTED_ONLY", args, user_config)

    _set_overrides_from_env()


def _set_overrides_from_env():
    global SEND_ANONYMOUS_USAGE_STATS

    flag_value = _get_flag_value_from_env("DO_NOT_TRACK")
    if flag_value is None:
        return

    SEND_ANONYMOUS_USAGE_STATS = not flag_value


def get_flag_value(flag, args, user_config):
    lc_flag = flag.lower()
    flag_value = getattr(args, lc_flag, None)
    if flag_value is None:
        # Environment variables use pattern 'DBT_{flag name}'
        env_flag = f"DBT_{flag}"
        env_value = os.getenv(env_flag)
        if env_value is not None and env_value != "":
            env_value = env_value.lower()
            # non Boolean values
            if flag in [
                "LOG_FORMAT",
                "PRINTER_WIDTH",
                "PROFILES_DIR",
                "INDIRECT_SELECTION",
                "EVENT_BUFFER_SIZE",
            ]:
                flag_value = env_value
            else:
                flag_value = env_set_bool(env_value)
        elif user_config is not None and getattr(user_config, lc_flag, None) is not None:
            flag_value = getattr(user_config, lc_flag)
        else:
            flag_value = flag_defaults[flag]
    flag_value = _load_flag_value(flag, args, user_config)

    if flag in ["PRINTER_WIDTH", "EVENT_BUFFER_SIZE"]:  # must be ints
        flag_value = int(flag_value)
    if flag == "PROFILES_DIR":
@@ -166,6 +173,42 @@ def get_flag_value(flag, args, user_config):
    return flag_value

def _load_flag_value(flag, args, user_config):
    lc_flag = flag.lower()
    flag_value = getattr(args, lc_flag, None)
    if flag_value is not None:
        return flag_value

    flag_value = _get_flag_value_from_env(flag)
    if flag_value is not None:
        return flag_value

    if user_config is not None and getattr(user_config, lc_flag, None) is not None:
        return getattr(user_config, lc_flag)

    return flag_defaults[flag]


def _get_flag_value_from_env(flag):
    # Environment variables use pattern 'DBT_{flag name}'
    env_flag = _get_env_flag(flag)
    env_value = os.getenv(env_flag)
    if env_value is None or env_value == "":
        return None

    env_value = env_value.lower()
    if flag in _NON_BOOLEAN_FLAGS:
        flag_value = env_value
    else:
        flag_value = env_set_bool(env_value)

    return flag_value


def _get_env_flag(flag):
    return flag if flag in _NON_DBT_ENV_FLAGS else f"DBT_{flag}"

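The refactor makes the precedence explicit: a CLI arg wins over an env var, which wins over user_config, which falls back to the defaults dict. A hedged standalone illustration (flag name and values invented; Args/UserConfig are stand-ins for the real objects):

# Illustrative only; simplified stand-ins for the helpers above.
import os

flag_defaults = {"PRINTER_WIDTH": 80}

class Args:
    printer_width = None          # no CLI override supplied

class UserConfig:
    printer_width = 120           # set in profiles.yml

os.environ["DBT_PRINTER_WIDTH"] = "100"
# _load_flag_value("PRINTER_WIDTH", Args(), UserConfig()) would return "100":
# the env var wins because the CLI arg is None. Unset DBT_PRINTER_WIDTH and the
# user_config value (120) wins; unset both and the default (80) applies.
# get_flag_value then casts PRINTER_WIDTH/EVENT_BUFFER_SIZE to int.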
def get_flag_dict():
    return {
        "use_experimental_parser": USE_EXPERIMENTAL_PARSER,
@@ -185,4 +228,5 @@ def get_flag_dict():
        "log_cache_events": LOG_CACHE_EVENTS,
        "event_buffer_size": EVENT_BUFFER_SIZE,
        "quiet": QUIET,
        "no_print": NO_PRINT,
    }

@@ -1,5 +1,6 @@
# special support for CLI argument parsing.
from dbt import flags
from copy import deepcopy
import itertools
from dbt.clients.yaml_helper import yaml, Loader, Dumper  # noqa: F401

@@ -112,9 +113,9 @@ def _get_list_dicts(dct: Dict[str, Any], key: str) -> List[RawDefinition]:
    return result


def _parse_exclusions(definition) -> Optional[SelectionSpec]:
def _parse_exclusions(definition, result={}) -> Optional[SelectionSpec]:
    exclusions = _get_list_dicts(definition, "exclude")
    parsed_exclusions = [parse_from_definition(excl) for excl in exclusions]
    parsed_exclusions = [parse_from_definition(excl, result=result) for excl in exclusions]
    if len(parsed_exclusions) == 1:
        return parsed_exclusions[0]
    elif len(parsed_exclusions) > 1:
@@ -124,7 +125,7 @@ def _parse_exclusions(definition) -> Optional[SelectionSpec]:


def _parse_include_exclude_subdefs(
    definitions: List[RawDefinition],
    definitions: List[RawDefinition], result={}
) -> Tuple[List[SelectionSpec], Optional[SelectionSpec]]:
    include_parts: List[SelectionSpec] = []
    diff_arg: Optional[SelectionSpec] = None
@@ -138,16 +139,16 @@ def _parse_include_exclude_subdefs(
                    f"You cannot provide multiple exclude arguments to the "
                    f"same selector set operator:\n{yaml_sel_cfg}"
                )
            diff_arg = _parse_exclusions(definition)
            diff_arg = _parse_exclusions(definition, result=result)
        else:
            include_parts.append(parse_from_definition(definition))
            include_parts.append(parse_from_definition(definition, result=result))

    return (include_parts, diff_arg)


def parse_union_definition(definition: Dict[str, Any]) -> SelectionSpec:
def parse_union_definition(definition: Dict[str, Any], result={}) -> SelectionSpec:
    union_def_parts = _get_list_dicts(definition, "union")
    include, exclude = _parse_include_exclude_subdefs(union_def_parts)
    include, exclude = _parse_include_exclude_subdefs(union_def_parts, result=result)

    union = SelectionUnion(components=include)

@@ -158,9 +159,9 @@ def parse_union_definition(definition: Dict[str, Any]) -> SelectionSpec:
    return SelectionDifference(components=[union, exclude], raw=definition)


def parse_intersection_definition(definition: Dict[str, Any]) -> SelectionSpec:
def parse_intersection_definition(definition: Dict[str, Any], result={}) -> SelectionSpec:
    intersection_def_parts = _get_list_dicts(definition, "intersection")
    include, exclude = _parse_include_exclude_subdefs(intersection_def_parts)
    include, exclude = _parse_include_exclude_subdefs(intersection_def_parts, result=result)
    intersection = SelectionIntersection(components=include)

    if exclude is None:
@@ -170,7 +171,7 @@ def parse_intersection_definition(definition: Dict[str, Any]) -> SelectionSpec:
    return SelectionDifference(components=[intersection, exclude], raw=definition)


def parse_dict_definition(definition: Dict[str, Any]) -> SelectionSpec:
def parse_dict_definition(definition: Dict[str, Any], result={}) -> SelectionSpec:
    diff_arg: Optional[SelectionSpec] = None
    if len(definition) == 1:
        key = list(definition)[0]
@@ -183,10 +184,15 @@ def parse_dict_definition(definition: Dict[str, Any]) -> SelectionSpec:
            "method": key,
            "value": value,
        }
    elif definition.get("method") == "selector":
        sel_def = definition.get("value")
        if sel_def not in result:
            raise ValidationException(f"Existing selector definition for {sel_def} not found.")
        return result[definition["value"]]["definition"]
    elif "method" in definition and "value" in definition:
        dct = definition
        if "exclude" in definition:
            diff_arg = _parse_exclusions(definition)
            diff_arg = _parse_exclusions(definition, result=result)
            dct = {k: v for k, v in dct.items() if k != "exclude"}
    else:
        raise ValidationException(
@@ -202,7 +208,11 @@ def parse_dict_definition(definition: Dict[str, Any]) -> SelectionSpec:
    return SelectionDifference(components=[base, diff_arg])


def parse_from_definition(definition: RawDefinition, rootlevel=False) -> SelectionSpec:
def parse_from_definition(
    definition: RawDefinition,
    rootlevel=False,
    result: Dict[str, Dict[str, Union[SelectionSpec, bool]]] = {},
) -> SelectionSpec:

    if (
        isinstance(definition, dict)
@@ -218,11 +228,11 @@ def parse_from_definition(definition: RawDefinition, rootlevel=False) -> Selecti
    if isinstance(definition, str):
        return SelectionCriteria.from_single_spec(definition)
    elif "union" in definition:
        return parse_union_definition(definition)
        return parse_union_definition(definition, result=result)
    elif "intersection" in definition:
        return parse_intersection_definition(definition)
        return parse_intersection_definition(definition, result=result)
    elif isinstance(definition, dict):
        return parse_dict_definition(definition)
        return parse_dict_definition(definition, result=result)
    else:
        raise ValidationException(
            f"Expected to find union, intersection, str or dict, instead "
@@ -238,6 +248,8 @@ def parse_from_selectors_definition(
    for selector in source.selectors:
        result[selector.name] = {
            "default": selector.default,
            "definition": parse_from_definition(selector.definition, rootlevel=True),
            "definition": parse_from_definition(
                selector.definition, rootlevel=True, result=deepcopy(result)
            ),
        }
    return result

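What this threading of `result` enables: a selector can now reference an earlier selector by name via `method: selector`. A hedged illustration of a selectors.yml that would exercise it (selector names invented):

# selectors:
#   - name: nightly
#     definition:
#       method: tag
#       value: nightly
#   - name: nightly_models_only
#     definition:
#       intersection:
#         - method: selector        # resolved against the already-parsed "nightly"
#           value: nightly
#         - method: resource_type
#           value: model

Because each selector is parsed with a deepcopy of the selectors parsed so far, a reference can only resolve to selectors defined earlier in the file.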
@@ -28,20 +28,16 @@ class Graph:
        """Returns all nodes having a path to `node` in `graph`"""
        if not self.graph.has_node(node):
            raise InternalException(f"Node {node} not found in the graph!")
        # This used to use nx.utils.reversed(self.graph), but that is deprecated,
        # so changing to use self.graph.reverse(copy=False) as recommended
        G = self.graph.reverse(copy=False) if self.graph.is_directed() else self.graph
        anc = nx.single_source_shortest_path_length(G=G, source=node, cutoff=max_depth).keys()
        return anc - {node}
        return {
            child
            for _, child in nx.bfs_edges(self.graph, node, reverse=True, depth_limit=max_depth)
        }

    def descendants(self, node: UniqueId, max_depth: Optional[int] = None) -> Set[UniqueId]:
        """Returns all nodes reachable from `node` in `graph`"""
        if not self.graph.has_node(node):
            raise InternalException(f"Node {node} not found in the graph!")
        des = nx.single_source_shortest_path_length(
            G=self.graph, source=node, cutoff=max_depth
        ).keys()
        return des - {node}
        return {child for _, child in nx.bfs_edges(self.graph, node, depth_limit=max_depth)}

    def select_childrens_parents(self, selected: Set[UniqueId]) -> Set[UniqueId]:
        ancestors_for = self.select_children(selected) | selected
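A quick standalone check of the bfs_edges-based traversal on a toy DAG (graph contents invented):

# Illustrative only.
import networkx as nx

g = nx.DiGraph([("a", "b"), ("b", "c"), ("c", "d")])
descendants = {child for _, child in nx.bfs_edges(g, "b", depth_limit=2)}
ancestors = {child for _, child in nx.bfs_edges(g, "b", reverse=True)}
print(descendants)  # {'c', 'd'}
print(ancestors)    # {'a'}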