mirror of https://github.com/dbt-labs/dbt-core
synced 2025-12-19 11:31:28 +00:00

Compare commits: adding-sem ... v1.5.0b1
126 Commits
| SHA1 |
|---|
| 7617eece3a |
| 8ce92b56d7 |
| 21fae1c4a4 |
| c952d44ec5 |
| 971b38c26b |
| b7884facbf |
| 57ce461067 |
| b1b830643e |
| 3cee9d16fa |
| c647706ac2 |
| 7b33ffb1bd |
| f38cbc4feb |
| 480e0e55c5 |
| e5c468bb93 |
| 605c72e86e |
| aad46ac5a8 |
| d85618ef26 |
| 1250f23c44 |
| daea7d59a7 |
| 4575757c2a |
| d7a2f77705 |
| 4a4b89606b |
| 1ebe2e7118 |
| f1087e57bf |
| 250537ba58 |
| ccc7222868 |
| 311a57a21e |
| b7c45de6b1 |
| c53c3cf181 |
| a77d325c8a |
| dd41384d82 |
| aa55fb2d30 |
| 864f4efb8b |
| 83c5a8c24b |
| 57aef33fb3 |
| 6d78e5e640 |
| f54a876f65 |
| 8bbae7926b |
| db2b12021e |
| 8b2c9bf39d |
| 298bf8a1d4 |
| 77748571b4 |
| 8ce4c289c5 |
| abbece8876 |
| 3ad40372e6 |
| c6d0e7c926 |
| bc015843d4 |
| df64511feb |
| db0981afe7 |
| dcf6544f93 |
| c2c8959fee |
| ccb4fa26cd |
| d0b5d752df |
| 4c63b630de |
| 9c0b62b4f5 |
| e08eede5e2 |
| 05e53d4143 |
| b2ea2b8b25 |
| 2245d8d710 |
| d9424cc710 |
| 0503c141b7 |
| 1a6e4a00c7 |
| 42b7caae19 |
| 622e5fd71d |
| 59d773ea7e |
| 84bf5b4620 |
| 726c4d6c58 |
| acc88d47a3 |
| 0a74594d09 |
| d2f3cdd6de |
| 92d1ef8482 |
| a8abc49632 |
| d6ac340df0 |
| c653330911 |
| 82d9b2fa87 |
| 3f96fad4f9 |
| c2c4757a2b |
| 08b2d94ccd |
| 7fa61f0816 |
| c65ba11ae6 |
| b0651b13b5 |
| a34521ec07 |
| da47b90503 |
| d27016a4e7 |
| db99e2f68d |
| cbb9117ab9 |
| e2ccf011d9 |
| 17014bfad3 |
| 92b7166c10 |
| 7b464b8a49 |
| 5c765bf3e2 |
| 93619a9a37 |
| a181cee6ae |
| a0ade13f5a |
| 9823a56e1d |
| 3aeab73740 |
| 9801eebc58 |
| 11c622230c |
| f0349488ed |
| c85be323f5 |
| 6954c4df1b |
| 30a1595f72 |
| f841a7ca76 |
| 07a004b301 |
| b05582de39 |
| fa7c4d19f0 |
| 1913eac5ed |
| 066346faa2 |
| 0a03355ceb |
| 53127daad8 |
| 91b20b7482 |
| 5b31cc4266 |
| 9bb1250869 |
| cc5a38ec5a |
| b0909b8f5d |
| 5d278dacf1 |
| ce1aaec31d |
| 1809852a0d |
| 88d2ee4813 |
| 77be2e4fdf |
| e91863de59 |
| 44b457c191 |
| a0ec0b6f9d |
| 1ec54abdc4 |
| 5efc4aa066 |
| 847c0b9644 |
.bumpversion.cfg

@@ -1,13 +1,15 @@
 [bumpversion]
-current_version = 1.5.0a1
+current_version = 1.5.0b1
-parse = (?P<major>\d+)
-    \.(?P<minor>\d+)
-    \.(?P<patch>\d+)
-    ((?P<prekind>a|b|rc)
-    (?P<pre>\d+)  # pre-release version num
-    )?
+parse = (?P<major>[\d]+) # major version number
+    \.(?P<minor>[\d]+) # minor version number
+    \.(?P<patch>[\d]+) # patch version number
+    (((?P<prekind>a|b|rc) # optional pre-release type
+    ?(?P<num>[\d]+?)) # optional pre-release version number
+    \.?(?P<nightly>[a-z0-9]+)? # optional nightly release indicator
+    )? # expected matches: `1.5.0`, `1.5.0a1`, `1.5.0a1.dev123457`, expected failures: `1`, `1.5`, `1.5.2-a1`, `text1.5.0`
 serialize =
-    {major}.{minor}.{patch}{prekind}{pre}
+    {major}.{minor}.{patch}{prekind}{num}.{nightly}
+    {major}.{minor}.{patch}{prekind}{num}
     {major}.{minor}.{patch}
 commit = False
 tag = False

@@ -21,9 +23,11 @@ values =
 rc
 final

-[bumpversion:part:pre]
+[bumpversion:part:num]
 first_value = 1

+[bumpversion:part:nightly]
+
 [bumpversion:file:core/setup.py]

 [bumpversion:file:core/dbt/version.py]
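The comment in the new `parse` pattern doubles as a test plan. As a quick sanity check (a minimal sketch, not part of the diff: the continuation lines are joined by hand and the inline `#` comments dropped), the combined regex can be exercised with Python's `re` module:

```python
import re

# The new parse pattern from .bumpversion.cfg, joined into one expression
# (inline comments removed, [\d] simplified to \d).
VERSION_RE = re.compile(
    r"(?P<major>\d+)"
    r"\.(?P<minor>\d+)"
    r"\.(?P<patch>\d+)"
    r"(((?P<prekind>a|b|rc)"       # optional pre-release type
    r"?(?P<num>\d+?))"             # optional pre-release version number
    r"\.?(?P<nightly>[a-z0-9]+)?"  # optional nightly release indicator
    r")?"
)

# Expected matches and failures, taken verbatim from the diff comment.
should_match = ["1.5.0", "1.5.0a1", "1.5.0a1.dev123457"]
should_fail = ["1", "1.5", "1.5.2-a1", "text1.5.0"]

for v in should_match:
    assert VERSION_RE.fullmatch(v), f"{v} should parse"
for v in should_fail:
    assert VERSION_RE.fullmatch(v) is None, f"{v} should be rejected"

print(VERSION_RE.fullmatch("1.5.0b1").groupdict())
# {'major': '1', 'minor': '5', 'patch': '0', 'prekind': 'b', 'num': '1', 'nightly': None}
```

Note that `re.fullmatch` is what enforces the expected failures; a bare `re.match` would still accept the `1.5.2` prefix of `1.5.2-a1`.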
80 .changes/1.5.0-b1.md (Normal file)
@@ -0,0 +1,80 @@
## dbt-core 1.5.0-b1 - February 17, 2023

### Features

- Data type constraints are now native to SQL table materializations. Enforce columns are specific data types and not null depending on database functionality. ([#6079](https://github.com/dbt-labs/dbt-core/issues/6079))
- Have dbt debug spit out structured json logs with flags enabled. ([#5353](https://github.com/dbt-labs/dbt-core/issues/5353))
- add adapter_response to dbt test and freshness result ([#2964](https://github.com/dbt-labs/dbt-core/issues/2964))
- Improve error message for packages missing `dbt_project.yml` ([#6663](https://github.com/dbt-labs/dbt-core/issues/6663))
- Adjust makefile to have clearer instructions for CI env var changes. ([#6689](https://github.com/dbt-labs/dbt-core/issues/6689))
- Stand-alone Python module for PostgresColumn ([#6772](https://github.com/dbt-labs/dbt-core/issues/6772))
- Exposure owner requires one of name or email keys, and accepts additional arbitrary keys ([#6833](https://github.com/dbt-labs/dbt-core/issues/6833))
- Parse 'group' resource ([#6921](https://github.com/dbt-labs/dbt-core/issues/6921))

### Fixes

- add merge_exclude_columns adapter tests ([#6699](https://github.com/dbt-labs/dbt-core/issues/6699))
- Include adapter_response in NodeFinished run_result log event ([#6703](https://github.com/dbt-labs/dbt-core/issues/6703))
- Sort cli vars before hashing for partial parsing ([#6710](https://github.com/dbt-labs/dbt-core/issues/6710))
- [Regression] exposure_content referenced incorrectly ([#6738](https://github.com/dbt-labs/dbt-core/issues/6738))
- Remove pin on packaging and stop using it for prerelease comparisons ([#6834](https://github.com/dbt-labs/dbt-core/issues/6834))
- Readd depends_on.macros to SeedNode, to support seeds with hooks calling macros ([#6806](https://github.com/dbt-labs/dbt-core/issues/6806))
- Fix regression of --quiet cli parameter behavior ([#6749](https://github.com/dbt-labs/dbt-core/issues/6749))
- Ensure results from hooks contain nodes when processing them ([#6796](https://github.com/dbt-labs/dbt-core/issues/6796))
- Always flush stdout after logging ([#6901](https://github.com/dbt-labs/dbt-core/issues/6901))
- Reapply logging fixes which were accidentally reverted ([#6936](https://github.com/dbt-labs/dbt-core/issues/6936))
- Set relation_name in test nodes at compile time ([#6930](https://github.com/dbt-labs/dbt-core/issues/6930))
- Readd initialization events, --log-cache-events in new CLI ([#6933](https://github.com/dbt-labs/dbt-core/issues/6933))
- Fix previous state tests and disabled exposures, metrics ([#6752](https://github.com/dbt-labs/dbt-core/issues/6752), [#6753](https://github.com/dbt-labs/dbt-core/issues/6753))
- Make use of hashlib.md5() FIPS compliant ([#6900](https://github.com/dbt-labs/dbt-core/issues/6900))

### Docs

- update link to installation instructions ([dbt-docs/#None](https://github.com/dbt-labs/dbt-docs/issues/None))
- Fix JSON path to overview docs ([dbt-docs/#366](https://github.com/dbt-labs/dbt-docs/issues/366))
- Searchable column descriptions ([dbt-docs/#140](https://github.com/dbt-labs/dbt-docs/issues/140), [dbt-docs/#322](https://github.com/dbt-labs/dbt-docs/issues/322), [dbt-docs/#369](https://github.com/dbt-labs/dbt-docs/issues/369))

### Under the Hood

- [CT-921] dbt compile works in click ([#5545](https://github.com/dbt-labs/dbt-core/issues/5545))
- Fix use of ConnectionReused logging event ([#6168](https://github.com/dbt-labs/dbt-core/issues/6168))
- Port docs tests to pytest ([#6573](https://github.com/dbt-labs/dbt-core/issues/6573))
- Update deprecated github action command ([#6153](https://github.com/dbt-labs/dbt-core/issues/6153))
- dbt snapshot works in click ([#5554](https://github.com/dbt-labs/dbt-core/issues/5554))
- dbt list working with click ([#5549](https://github.com/dbt-labs/dbt-core/issues/5549))
- Add dbt run-operation to click CLI ([#5552](https://github.com/dbt-labs/dbt-core/issues/5552))
- dbt build working with new click framework ([#5541](https://github.com/dbt-labs/dbt-core/issues/5541))
- dbt docs generate works with new click framework ([#5543](https://github.com/dbt-labs/dbt-core/issues/5543))
- Replaced the EmptyLine event with a more general Formatting event, and added a Note event. ([#6481](https://github.com/dbt-labs/dbt-core/issues/6481))
- Small optimization on manifest parsing benefitting large DAGs ([#6697](https://github.com/dbt-labs/dbt-core/issues/6697))
- Revised and simplified various structured logging events ([#6664](https://github.com/dbt-labs/dbt-core/issues/6664), [#6665](https://github.com/dbt-labs/dbt-core/issues/6665), [#6666](https://github.com/dbt-labs/dbt-core/issues/6666))
- dbt init works with click ([#5548](https://github.com/dbt-labs/dbt-core/issues/5548))
- [CT-920][CT-1900] Create Click CLI runner and use it to fix dbt docs commands ([#5544](https://github.com/dbt-labs/dbt-core/issues/5544), [#6722](https://github.com/dbt-labs/dbt-core/issues/6722))
- Migrate debug task to click ([#5546](https://github.com/dbt-labs/dbt-core/issues/5546))
- Optimized GraphQueue to remove graph analysis bottleneck in large dags. ([#6759](https://github.com/dbt-labs/dbt-core/issues/6759))
- Implement --version for click cli ([#6757](https://github.com/dbt-labs/dbt-core/issues/6757))
- [CT-1841] Convert custom target test to Pytest ([#6638](https://github.com/dbt-labs/dbt-core/issues/6638))
- Remove BigQuery-specific byte abbreviations ([#6741](https://github.com/dbt-labs/dbt-core/issues/6741))
- warn_error/warn_error_options mutual exclusivity in click ([#6579](https://github.com/dbt-labs/dbt-core/issues/6579))
- Enables the new Click Cli on the commandline! 🚀 ([#6784](https://github.com/dbt-labs/dbt-core/issues/6784))
- Lazily call --version ([#6812](https://github.com/dbt-labs/dbt-core/issues/6812))
- Moving simple_seed to adapter zone to help adapter test conversions ([#CT-1959](https://github.com/dbt-labs/dbt-core/issues/CT-1959))
- flags.THREADS defaults to None ([#6887](https://github.com/dbt-labs/dbt-core/issues/6887))
- Fixing target type exposure error ([#6928](https://github.com/dbt-labs/dbt-core/issues/6928))
- Test binary serialization of logging events ([#6852](https://github.com/dbt-labs/dbt-core/issues/6852))

### Dependencies

- Bump ubuntu from 22.04 to 23.04 ([#6865](https://github.com/dbt-labs/dbt-core/pull/6865))
- Revert hoisting dbt.cli.main into the dbt.name namespace ([#](https://github.com/dbt-labs/dbt-core/pull/))

### Contributors

- [@aezomz](https://github.com/aezomz) ([#2964](https://github.com/dbt-labs/dbt-core/issues/2964))
- [@boxysean](https://github.com/boxysean) ([#6697](https://github.com/dbt-labs/dbt-core/issues/6697))
- [@callum-mcdata](https://github.com/callum-mcdata) ([#6928](https://github.com/dbt-labs/dbt-core/issues/6928))
- [@dave-connors-3](https://github.com/dave-connors-3) ([#6699](https://github.com/dbt-labs/dbt-core/issues/6699))
- [@davidbloss](https://github.com/davidbloss) ([#6153](https://github.com/dbt-labs/dbt-core/issues/6153))
- [@halvorlu](https://github.com/halvorlu) ([#366](https://github.com/dbt-labs/dbt-core/issues/366))
- [@nielspardon](https://github.com/nielspardon) ([#6900](https://github.com/dbt-labs/dbt-core/issues/6900))
- [@ryancharris](https://github.com/ryancharris) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
- [@sungchun12](https://github.com/sungchun12) ([#6079](https://github.com/dbt-labs/dbt-core/issues/6079))
6 .changes/1.5.0/Dependencies-20230206-000926.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: "Dependencies"
body: "Bump ubuntu from 22.04 to 23.04"
time: 2023-02-06T00:09:26.00000Z
custom:
  Author: dependabot[bot]
  PR: 6865

6 .changes/1.5.0/Dependencies-20230215-091759.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Dependencies
body: Revert hoisting dbt.cli.main into the dbt.name namespace
time: 2023-02-15T09:17:59.04148-08:00
custom:
  Author: aranke
  PR: ''

6 .changes/1.5.0/Docs-20230207-123807.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Docs
body: update link to installation instructions
time: 2023-02-07T12:38:07.336783-05:00
custom:
  Author: ryancharris
  Issue: None

6 .changes/1.5.0/Docs-20230209-082901.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Docs
body: Fix JSON path to overview docs
time: 2023-02-09T08:29:01.432616-07:00
custom:
  Author: halvorlu
  Issue: "366"

6 .changes/1.5.0/Docs-20230209-212729.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Docs
body: Searchable column descriptions
time: 2023-02-09T21:27:29.570243-07:00
custom:
  Author: dbeatty10
  Issue: 140 322 369

8 .changes/1.5.0/Features-20221118-141120.yaml (Normal file)
@@ -0,0 +1,8 @@
kind: Features
body: Data type constraints are now native to SQL table materializations. Enforce
  columns are specific data types and not null depending on database functionality.
time: 2022-11-18T14:11:20.868062-08:00
custom:
  Author: sungchun12
  Issue: "6079"
  PR: "6271"

6 .changes/1.5.0/Features-20230107-003157.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Features
body: Have dbt debug spit out structured json logs with flags enabled.
time: 2023-01-07T00:31:57.516063-08:00
custom:
  Author: versusfacit
  Issue: "5353"

6 .changes/1.5.0/Features-20230118-233801.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Features
body: add adapter_response to dbt test and freshness result
time: 2023-01-18T23:38:01.857342+08:00
custom:
  Author: aezomz
  Issue: "2964"

6 .changes/1.5.0/Features-20230120-112921.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Features
body: Improve error message for packages missing `dbt_project.yml`
time: 2023-01-20T11:29:21.509967-07:00
custom:
  Author: dbeatty10
  Issue: "6663"

6 .changes/1.5.0/Features-20230126-154716.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Features
body: Adjust makefile to have clearer instructions for CI env var changes.
time: 2023-01-26T15:47:16.887327-08:00
custom:
  Author: versusfacit
  Issue: "6689"

6 .changes/1.5.0/Features-20230127-162812.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Features
body: Stand-alone Python module for PostgresColumn
time: 2023-01-27T16:28:12.212427-08:00
custom:
  Author: nssalian
  Issue: "6772"

7 .changes/1.5.0/Features-20230209-092059.yaml (Normal file)
@@ -0,0 +1,7 @@
kind: Features
body: Exposure owner requires one of name or email keys, and accepts additional arbitrary
  keys
time: 2023-02-09T09:20:59.300272-05:00
custom:
  Author: michelleark
  Issue: "6833"

6 .changes/1.5.0/Features-20230209-093409.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Features
body: Parse 'group' resource
time: 2023-02-09T09:34:09.547006-05:00
custom:
  Author: michelleark
  Issue: "6921"

6 .changes/1.5.0/Fixes-20230123-132814.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Fixes
body: add merge_exclude_columns adapter tests
time: 2023-01-23T13:28:14.808748-06:00
custom:
  Author: dave-connors-3
  Issue: "6699"

6 .changes/1.5.0/Fixes-20230124-115837.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Fixes
body: Include adapter_response in NodeFinished run_result log event
time: 2023-01-24T11:58:37.74179-05:00
custom:
  Author: gshank
  Issue: "6703"

6 .changes/1.5.0/Fixes-20230124-141943.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Fixes
body: Sort cli vars before hashing for partial parsing
time: 2023-01-24T14:19:43.333628-05:00
custom:
  Author: gshank
  Issue: "6710"

6 .changes/1.5.0/Fixes-20230125-191739.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Fixes
body: '[Regression] exposure_content referenced incorrectly'
time: 2023-01-25T19:17:39.942081-05:00
custom:
  Author: Mathyoub
  Issue: "6738"

6 .changes/1.5.0/Fixes-20230201-154418.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Fixes
body: Remove pin on packaging and stop using it for prerelease comparisons
time: 2023-02-01T15:44:18.279158-05:00
custom:
  Author: gshank
  Issue: "6834"

6 .changes/1.5.0/Fixes-20230203-135557.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Fixes
body: Readd depends_on.macros to SeedNode, to support seeds with hooks calling macros
time: 2023-02-03T13:55:57.853715+01:00
custom:
  Author: jtcohen6
  Issue: "6806"

6 .changes/1.5.0/Fixes-20230207-143544.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Fixes
body: Fix regression of --quiet cli parameter behavior
time: 2023-02-07T14:35:44.160163-05:00
custom:
  Author: peterallenwebb
  Issue: "6749"

6 .changes/1.5.0/Fixes-20230208-110551.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Fixes
body: Ensure results from hooks contain nodes when processing them
time: 2023-02-08T11:05:51.952494-06:00
custom:
  Author: emmyoop
  Issue: "6796"

6 .changes/1.5.0/Fixes-20230208-154935.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Fixes
body: Always flush stdout after logging
time: 2023-02-08T15:49:35.175874-05:00
custom:
  Author: peterallenwebb
  Issue: "6901"

6 .changes/1.5.0/Fixes-20230210-103028.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Fixes
body: Reapply logging fixes which were accidentally reverted
time: 2023-02-10T10:30:28.179997-05:00
custom:
  Author: peterallenwebb
  Issue: "6936"

6 .changes/1.5.0/Fixes-20230210-194157.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Fixes
body: Set relation_name in test nodes at compile time
time: 2023-02-10T19:41:57.386766-05:00
custom:
  Author: gshank
  Issue: "6930"

6 .changes/1.5.0/Fixes-20230213-130522.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Fixes
body: Readd initialization events, --log-cache-events in new CLI
time: 2023-02-13T13:05:22.989477+01:00
custom:
  Author: jtcohen6
  Issue: "6933"

6 .changes/1.5.0/Fixes-20230213-170723.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Fixes
body: Fix previous state tests and disabled exposures, metrics
time: 2023-02-13T17:07:23.185679-05:00
custom:
  Author: gshank
  Issue: 6752 6753

6 .changes/1.5.0/Fixes-20230215-104536.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Fixes
body: Make use of hashlib.md5() FIPS compliant
time: 2023-02-15T10:45:36.755797+01:00
custom:
  Author: nielspardon
  Issue: "6900"

6 .changes/1.5.0/Under the Hood-20230111-145143.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: '[CT-921] dbt compile works in click'
time: 2023-01-11T14:51:43.324107-08:00
custom:
  Author: aranke
  Issue: "5545"

6 .changes/1.5.0/Under the Hood-20230113-150700.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Port docs tests to pytest
time: 2023-01-13T15:07:00.477038-05:00
custom:
  Author: peterallenwebb
  Issue: "6573"

6 .changes/1.5.0/Under the Hood-20230117-162505.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: dbt snapshot works in click
time: 2023-01-17T16:25:05.973769-08:00
custom:
  Author: ChenyuLInx
  Issue: "5554"

6 .changes/1.5.0/Under the Hood-20230117-213729.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: dbt list working with click
time: 2023-01-17T21:37:29.91632-05:00
custom:
  Author: michelleark
  Issue: "5549"

6 .changes/1.5.0/Under the Hood-20230119-105304.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Add dbt run-operation to click CLI
time: 2023-01-19T10:53:04.154871+01:00
custom:
  Author: jtcohen6
  Issue: "5552"

6 .changes/1.5.0/Under the Hood-20230119-205650.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: dbt build working with new click framework
time: 2023-01-19T20:56:50.50549-05:00
custom:
  Author: michelleark
  Issue: "5541"

6 .changes/1.5.0/Under the Hood-20230119-211040.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: dbt docs generate works with new click framework
time: 2023-01-19T21:10:40.698851-05:00
custom:
  Author: michelleark
  Issue: "5543"

7 .changes/1.5.0/Under the Hood-20230120-172254.yaml (Normal file)
@@ -0,0 +1,7 @@
kind: Under the Hood
body: Replaced the EmptyLine event with a more general Formatting event, and added
  a Note event.
time: 2023-01-20T17:22:54.45828-05:00
custom:
  Author: peterallenwebb
  Issue: "6481"

6 .changes/1.5.0/Under the Hood-20230122-215235.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Small optimization on manifest parsing benefitting large DAGs
time: 2023-01-22T21:52:35.549814+01:00
custom:
  Author: boxysean
  Issue: "6697"

6 .changes/1.5.0/Under the Hood-20230124-153553.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Revised and simplified various structured logging events
time: 2023-01-24T15:35:53.065356-05:00
custom:
  Author: peterallenwebb
  Issue: 6664 6665 6666

6 .changes/1.5.0/Under the Hood-20230124-175110.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: dbt init works with click
time: 2023-01-24T17:51:10.74065-05:00
custom:
  Author: michelleark
  Issue: "5548"

6 .changes/1.5.0/Under the Hood-20230125-041136.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: '[CT-920][CT-1900] Create Click CLI runner and use it to fix dbt docs commands'
time: 2023-01-25T04:11:36.57506-08:00
custom:
  Author: aranke
  Issue: 5544 6722

6 .changes/1.5.0/Under the Hood-20230125-102606.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Migrate debug task to click
time: 2023-01-25T10:26:06.735994-06:00
custom:
  Author: stu-k
  Issue: "5546"

6 .changes/1.5.0/Under the Hood-20230126-135939.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: 'Optimized GraphQueue to remove graph analysis bottleneck in large dags.'
time: 2023-01-26T13:59:39.518345-05:00
custom:
  Author: peterallenwebb
  Issue: "6759"

6 .changes/1.5.0/Under the Hood-20230126-143102.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Implement --version for click cli
time: 2023-01-26T14:31:02.740282-06:00
custom:
  Author: stu-k
  Issue: "6757"

6 .changes/1.5.0/Under the Hood-20230126-164741.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: '[CT-1841] Convert custom target test to Pytest'
time: 2023-01-26T16:47:41.198714-08:00
custom:
  Author: aranke
  Issue: "6638"

6 .changes/1.5.0/Under the Hood-20230130-153306.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Remove BigQuery-specific byte abbreviations
time: 2023-01-30T15:33:06.28965-07:00
custom:
  Author: dbeatty10
  Issue: "6741"

6 .changes/1.5.0/Under the Hood-20230130-175752.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: "Enables the new Click Cli on the commandline! \U0001F680"
time: 2023-01-30T17:57:52.65626-06:00
custom:
  Author: iknox-fa
  Issue: "6784"

6 .changes/1.5.0/Under the Hood-20230130-180917.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: warn_error/warn_error_options mutual exclusivity in click
time: 2023-01-30T18:09:17.240662-05:00
custom:
  Author: michelleark
  Issue: "6579"

6 .changes/1.5.0/Under the Hood-20230131-141806.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Lazily call --version
time: 2023-01-31T14:18:06.02312-06:00
custom:
  Author: stu-k
  Issue: "6812"

6 .changes/1.5.0/Under the Hood-20230203-143551.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Moving simple_seed to adapter zone to help adapter test conversions
time: 2023-02-03T14:35:51.481856-08:00
custom:
  Author: nssalian
  Issue: CT-1959

6 .changes/1.5.0/Under the Hood-20230207-165111.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: flags.THREADS defaults to None
time: 2023-02-07T16:51:11.011984-05:00
custom:
  Author: michelleark
  Issue: "6887"

6 .changes/1.5.0/Under the Hood-20230210-084647.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Fixing target type exposure error
time: 2023-02-10T08:46:47.72936-06:00
custom:
  Author: callum-mcdata
  Issue: "6928"

6 .changes/1.5.0/Under the Hood-20230216-143252.yaml (Normal file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Test binary serialization of logging events
time: 2023-02-16T14:32:52.524225-05:00
custom:
  Author: gshank
  Issue: "6852"
.changie.yaml

@@ -4,6 +4,7 @@ headerPath: header.tpl.md
 versionHeaderPath: ""
 changelogPath: CHANGELOG.md
 versionExt: md
+envPrefix: "CHANGIE_"
 versionFormat: '## dbt-core {{.Version}} - {{.Time.Format "January 02, 2006"}}'
 kindFormat: '### {{.Kind}}'
 changeFormat: |-

@@ -87,32 +88,44 @@ custom:

 footerFormat: |
   {{- $contributorDict := dict }}
-  {{- /* any names added to this list should be all lowercase for later matching purposes */}}
-  {{- $core_team := list "michelleark" "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "aranke" "dependabot[bot]" "snyk-bot" "colin-rogers-dbt" }}
+  {{- /* ensure all names in this list are all lowercase for later matching purposes */}}
+  {{- $core_team := splitList " " .Env.CORE_TEAM }}
+  {{- /* ensure we always skip snyk and dependabot in addition to the core team */}}
+  {{- $maintainers := list "dependabot[bot]" "snyk-bot"}}
+  {{- range $team_member := $core_team }}
+  {{- $team_member_lower := lower $team_member }}
+  {{- $maintainers = append $maintainers $team_member_lower }}
+  {{- end }}
   {{- range $change := .Changes }}
   {{- $authorList := splitList " " $change.Custom.Author }}
   {{- /* loop through all authors for a single changelog */}}
   {{- range $author := $authorList }}
   {{- $authorLower := lower $author }}
   {{- /* we only want to include non-core team contributors */}}
-  {{- if not (has $authorLower $core_team)}}
+  {{- if not (has $authorLower $maintainers)}}
   {{- $changeList := splitList " " $change.Custom.Author }}
-  {{- /* Docs kind link back to dbt-docs instead of dbt-core issues */}}
+  {{- $IssueList := list }}
   {{- $changeLink := $change.Kind }}
   {{- if or (eq $change.Kind "Dependencies") (eq $change.Kind "Security") }}
-  {{- $changeLink = "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $change.Custom.PR }}
-  {{- else if eq $change.Kind "Docs"}}
-  {{- $changeLink = "[dbt-docs/#nbr](https://github.com/dbt-labs/dbt-docs/issues/nbr)" | replace "nbr" $change.Custom.Issue }}
+  {{- $changes := splitList " " $change.Custom.PR }}
+  {{- range $issueNbr := $changes }}
+  {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $issueNbr }}
+  {{- $IssueList = append $IssueList $changeLink }}
+  {{- end -}}
   {{- else }}
-  {{- $changeLink = "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $change.Custom.Issue }}
+  {{- $changes := splitList " " $change.Custom.Issue }}
+  {{- range $issueNbr := $changes }}
+  {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
+  {{- $IssueList = append $IssueList $changeLink }}
+  {{- end -}}
   {{- end }}
   {{- /* check if this contributor has other changes associated with them already */}}
   {{- if hasKey $contributorDict $author }}
   {{- $contributionList := get $contributorDict $author }}
-  {{- $contributionList = append $contributionList $changeLink }}
+  {{- $contributionList = concat $contributionList $IssueList }}
   {{- $contributorDict := set $contributorDict $author $contributionList }}
   {{- else }}
-  {{- $contributionList := list $changeLink }}
+  {{- $contributionList := $IssueList }}
   {{- $contributorDict := set $contributorDict $author $contributionList }}
   {{- end }}
   {{- end}}
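The rewritten `footerFormat` is essentially dict-building: split the space-separated `Author`, `Issue`, and `PR` fields, skip core-team members and bots, and accumulate one list of issue links per outside contributor. A rough Python restatement of that flow (the change records and field shapes here are illustrative stand-ins, not changie's actual API):

```python
# Rough Python equivalent of the footerFormat template logic above.
# changie feeds the real template .Changes and the CORE_TEAM env var;
# the sample `changes` list below is hypothetical.
import os

maintainers = {"dependabot[bot]", "snyk-bot"}
maintainers |= {m.lower() for m in os.environ.get("CORE_TEAM", "").split()}

changes = [  # stand-ins for $change objects
    {"Kind": "Features", "Author": "aezomz", "Issue": "2964", "PR": ""},
    {"Kind": "Dependencies", "Author": "dependabot[bot]", "Issue": "", "PR": "6865"},
]

contributor_dict: dict[str, list[str]] = {}
for change in changes:
    for author in change["Author"].split():
        if author.lower() in maintainers:
            continue  # core team and bots are not listed as contributors
        if change["Kind"] in ("Dependencies", "Security"):
            base, numbers = "https://github.com/dbt-labs/dbt-core/pull/{}", change["PR"].split()
        else:
            base, numbers = "https://github.com/dbt-labs/dbt-core/issues/{}", change["Issue"].split()
        links = [f"[#{n}]({base.format(n)})" for n in numbers]
        contributor_dict.setdefault(author, []).extend(links)

for author, links in sorted(contributor_dict.items()):
    print(f"- [@{author}](https://github.com/{author}) ({', '.join(links)})")
```

The printed lines match the `### Contributors` format seen in `.changes/1.5.0-b1.md` above.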
41 .github/workflows/cut-release-branch.yml (vendored, Normal file)
@@ -0,0 +1,41 @@
# **what?**
# Cuts a new `*.latest` branch
# Also cleans up all files in `.changes/unreleased` and `.changes/previous version` on
# `main` and bumps `main` to the input version.

# **why?**
# Generally reduces the workload of engineers and reduces error. Allow automation.

# **when?**
# This will run when called manually.

name: Cut new release branch

on:
  workflow_dispatch:
    inputs:
      version_to_bump_main:
        description: 'The alpha version main should bump to (ex. 1.6.0a1)'
        required: true
      new_branch_name:
        description: 'The full name of the new branch (ex. 1.5.latest)'
        required: true

defaults:
  run:
    shell: bash

permissions:
  contents: write

jobs:
  cut_branch:
    name: "Cut branch and clean up main for dbt-core"
    uses: dbt-labs/actions/.github/workflows/cut-release-branch.yaml@main
    with:
      version_to_bump_main: ${{ inputs.version_to_bump_main }}
      new_branch_name: ${{ inputs.new_branch_name }}
      PR_title: "Cleanup main after cutting new ${{ inputs.new_branch_name }} branch"
      PR_body: "All adapter PRs will fail CI until the dbt-core PR has been merged due to release version conflicts."
    secrets:
      FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }}
109 .github/workflows/nightly-release.yml (vendored, Normal file)
@@ -0,0 +1,109 @@
# **what?**
# Nightly releases to GitHub and PyPI. This workflow produces the following outcome:
# - generate and validate data for the nightly release (commit SHA, version number, release branch);
# - pass data to the release workflow;
# - the nightly release will be pushed to GitHub as a draft release;
# - the nightly build will be pushed to test PyPI;
#
# **why?**
# Ensure an automated and tested release process for nightly builds
#
# **when?**
# This workflow runs on schedule or can be run manually on demand.

name: Nightly Test Release to GitHub and PyPI

on:
  workflow_dispatch: # for manual triggering
  schedule:
    - cron: 0 9 * * *

permissions:
  contents: write # this is the permission that allows creating a new release

defaults:
  run:
    shell: bash

env:
  RELEASE_BRANCH: "main"

jobs:
  aggregate-release-data:
    runs-on: ubuntu-latest

    outputs:
      commit_sha: ${{ steps.resolve-commit-sha.outputs.release_commit }}
      version_number: ${{ steps.nightly-release-version.outputs.number }}
      release_branch: ${{ steps.release-branch.outputs.name }}

    steps:
      - name: "Checkout ${{ github.repository }} Branch ${{ env.RELEASE_BRANCH }}"
        uses: actions/checkout@v3
        with:
          ref: ${{ env.RELEASE_BRANCH }}

      - name: "Resolve Commit To Release"
        id: resolve-commit-sha
        run: |
          commit_sha=$(git rev-parse HEAD)
          echo "release_commit=$commit_sha" >> $GITHUB_OUTPUT

      - name: "Get Current Version Number"
        id: version-number-sources
        run: |
          current_version=`awk -F"current_version = " '{print $2}' .bumpversion.cfg | tr '\n' ' '`
          echo "current_version=$current_version" >> $GITHUB_OUTPUT

      - name: "Audit Version And Parse Into Parts"
        id: semver
        uses: dbt-labs/actions/parse-semver@v1.1.0
        with:
          version: ${{ steps.version-number-sources.outputs.current_version }}

      - name: "Get Current Date"
        id: current-date
        run: echo "date=$(date +'%m%d%Y')" >> $GITHUB_OUTPUT

      - name: "Generate Nightly Release Version Number"
        id: nightly-release-version
        run: |
          number="${{ steps.semver.outputs.version }}.dev${{ steps.current-date.outputs.date }}"
          echo "number=$number" >> $GITHUB_OUTPUT

      - name: "Audit Nightly Release Version And Parse Into Parts"
        uses: dbt-labs/actions/parse-semver@v1.1.0
        with:
          version: ${{ steps.nightly-release-version.outputs.number }}

      - name: "Set Release Branch"
        id: release-branch
        run: |
          echo "name=${{ env.RELEASE_BRANCH }}" >> $GITHUB_OUTPUT

  log-outputs-aggregate-release-data:
    runs-on: ubuntu-latest
    needs: [aggregate-release-data]

    steps:
      - name: "[DEBUG] Log Outputs"
        run: |
          echo commit_sha : ${{ needs.aggregate-release-data.outputs.commit_sha }}
          echo version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
          echo release_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}

  release-github-pypi:
    needs: [aggregate-release-data]

    uses: ./.github/workflows/release.yml
    with:
      sha: ${{ needs.aggregate-release-data.outputs.commit_sha }}
      target_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}
      version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
      build_script_path: "scripts/build-dist.sh"
      env_setup_script_path: "scripts/env-setup.sh"
      s3_bucket_name: "core-team-artifacts"
      package_test_command: "dbt --version"
      test_run: true
      nightly_release: true
    secrets: inherit
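The version arithmetic in `aggregate-release-data` is plain string work: read `current_version` out of `.bumpversion.cfg` and suffix it with `.dev` plus an `%m%d%Y` datestamp. A small Python sketch of the same computation (standing in for the awk and date steps; it assumes the cfg file stays configparser-friendly, and it skips the `parse-semver` validation the workflow performs):

```python
# Sketch of the nightly version-number computation done by the workflow:
# read current_version from .bumpversion.cfg, append ".dev" + MMDDYYYY.
import configparser
from datetime import datetime, timezone

config = configparser.ConfigParser()
config.read(".bumpversion.cfg")
current_version = config["bumpversion"]["current_version"]  # e.g. "1.5.0b1"

datestamp = datetime.now(timezone.utc).strftime("%m%d%Y")  # matches date +'%m%d%Y'
nightly_version = f"{current_version}.dev{datestamp}"
print(nightly_version)  # e.g. "1.5.0b1.dev02172023"
```

The resulting string is exactly the shape the new `parse` pattern's `nightly` group was added to accept, e.g. `1.5.0a1.dev123457` in the pattern's own comment.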
30 .github/workflows/release-branch-tests.yml (vendored)
@@ -28,7 +28,33 @@ on:
 permissions: read-all

 jobs:
+  fetch-latest-branches:
+    runs-on: ubuntu-latest
+
+    outputs:
+      latest-branches: ${{ steps.get-latest-branches.outputs.repo-branches }}
+
+    steps:
+      - name: "Fetch dbt-core Latest Branches"
+        uses: dbt-labs/actions/fetch-repo-branches@v1.1.1
+        id: get-latest-branches
+        with:
+          repo_name: ${{ github.event.repository.name }}
+          organization: "dbt-labs"
+          pat: ${{ secrets.GITHUB_TOKEN }}
+          fetch_protected_branches_only: true
+          regex: "^1.[0-9]+.latest$"
+          perform_match_method: "match"
+          retries: 3
+
+      - name: "[ANNOTATION] ${{ github.event.repository.name }} - branches to test"
+        run: |
+          title="${{ github.event.repository.name }} - branches to test"
+          message="The workflow will run tests for the following branches of the ${{ github.event.repository.name }} repo: ${{ steps.get-latest-branches.outputs.repo-branches }}"
+          echo "::notice $title::$message"
+
   kick-off-ci:
+    needs: [fetch-latest-branches]
     name: Kick-off CI
     runs-on: ubuntu-latest

@@ -39,7 +65,9 @@ jobs:
       max-parallel: 1
       fail-fast: false
       matrix:
-        branch: [1.0.latest, 1.1.latest, 1.2.latest, 1.3.latest, main]
+        branch: ${{ fromJSON(needs.fetch-latest-branches.outputs.latest-branches) }}
+        include:
+          - branch: 'main'

     steps:
       - name: Call CI workflow for ${{ matrix.branch }} branch
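One subtlety in the new job: the branch filter `^1.[0-9]+.latest$` leaves its dots unescaped, so they match any character. A quick Python check (assuming the action applies the pattern with `re.match`-like semantics, per its `perform_match_method: "match"` input):

```python
import re

# The regex from the workflow, dots unescaped as written.
pattern = re.compile(r"^1.[0-9]+.latest$")

branches = ["1.0.latest", "1.4.latest", "main", "1x4xlatest", "feature/1.4.latest"]
print([b for b in branches if pattern.match(b)])
# ['1.0.latest', '1.4.latest', '1x4xlatest'] -- the unescaped dots are permissive
```

In practice this is harmless because no `1x4xlatest`-style branch names exist in the repo, and `main` is pulled in separately through the matrix `include`.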
339 .github/workflows/release.yml (vendored)
@@ -1,24 +1,110 @@
 # **what?**
-# Take the given commit, run unit tests specifically on that sha, build and
-# package it, and then release to GitHub and PyPi with that specific build
+# Release workflow provides the following steps:
+# - checkout the given commit;
+# - validate version in sources and changelog file for given version;
+# - bump the version and generate a changelog if needed;
+# - merge all changes to the target branch if needed;
+# - run unit and integration tests against given commit;
+# - build and package that SHA;
+# - release it to GitHub and PyPI with that specific build;
+#
 # **why?**
 # Ensure an automated and tested release process
+#
 # **when?**
-# This will only run manually with a given sha and version
+# This workflow can be run manually on demand or can be called by other workflows

-name: Release to GitHub and PyPi
+name: Release to GitHub and PyPI

 on:
   workflow_dispatch:
     inputs:
       sha:
-        description: 'The last commit sha in the release'
+        description: "The last commit sha in the release"
+        type: string
         required: true
+      target_branch:
+        description: "The branch to release from"
+        type: string
+        required: true
       version_number:
-        description: 'The release version number (i.e. 1.0.0b1)'
+        description: "The release version number (i.e. 1.0.0b1)"
+        type: string
         required: true
+      build_script_path:
+        description: "Build script path"
+        type: string
+        default: "scripts/build-dist.sh"
+        required: true
+      env_setup_script_path:
+        description: "Environment setup script path"
+        type: string
+        default: "scripts/env-setup.sh"
+        required: false
+      s3_bucket_name:
+        description: "AWS S3 bucket name"
+        type: string
+        default: "core-team-artifacts"
+        required: true
+      package_test_command:
+        description: "Package test command"
+        type: string
+        default: "dbt --version"
+        required: true
+      test_run:
+        description: "Test run (Publish release as draft)"
+        type: boolean
+        default: true
+        required: false
+      nightly_release:
+        description: "Nightly release to dev environment"
+        type: boolean
+        default: false
+        required: false
+  workflow_call:
+    inputs:
+      sha:
+        description: "The last commit sha in the release"
+        type: string
+        required: true
+      target_branch:
+        description: "The branch to release from"
+        type: string
+        required: true
+      version_number:
+        description: "The release version number (i.e. 1.0.0b1)"
+        type: string
+        required: true
+      build_script_path:
+        description: "Build script path"
+        type: string
+        default: "scripts/build-dist.sh"
+        required: true
+      env_setup_script_path:
+        description: "Environment setup script path"
+        type: string
+        default: "scripts/env-setup.sh"
+        required: false
+      s3_bucket_name:
+        description: "AWS S3 bucket name"
+        type: string
+        default: "core-team-artifacts"
+        required: true
+      package_test_command:
+        description: "Package test command"
+        type: string
+        default: "dbt --version"
+        required: true
+      test_run:
+        description: "Test run (Publish release as draft)"
+        type: boolean
+        default: true
+        required: false
+      nightly_release:
+        description: "Nightly release to dev environment"
+        type: boolean
+        default: false
+        required: false

 permissions:
   contents: write # this is the permission that allows creating a new release

@@ -28,175 +114,116 @@ defaults:
     shell: bash

 jobs:
-  unit:
-    name: Unit test
+  log-inputs:
+    name: Log Inputs
     runs-on: ubuntu-latest
-    env:
-      TOXENV: "unit"
     steps:
-      - name: Check out the repository
-        uses: actions/checkout@v2
-        with:
-          persist-credentials: false
-          ref: ${{ github.event.inputs.sha }}
-
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: 3.8
-
-      - name: Install python dependencies
+      - name: "[DEBUG] Print Variables"
         run: |
-          pip install --user --upgrade pip
-          pip install tox
-          pip --version
-          tox --version
-
-      - name: Run tox
-        run: tox
-
-  build:
-    name: build packages
+          echo The last commit sha in the release: ${{ inputs.sha }}
+          echo The branch to release from: ${{ inputs.target_branch }}
+          echo The release version number: ${{ inputs.version_number }}
+          echo Build script path: ${{ inputs.build_script_path }}
+          echo Environment setup script path: ${{ inputs.env_setup_script_path }}
+          echo AWS S3 bucket name: ${{ inputs.s3_bucket_name }}
+          echo Package test command: ${{ inputs.package_test_command }}
+          echo Test run: ${{ inputs.test_run }}
+          echo Nightly release: ${{ inputs.nightly_release }}

+  bump-version-generate-changelog:
+    name: Bump package version, Generate changelog
+    uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@main
+    with:
+      sha: ${{ inputs.sha }}
+      version_number: ${{ inputs.version_number }}
+      target_branch: ${{ inputs.target_branch }}
+      env_setup_script_path: ${{ inputs.env_setup_script_path }}
+      test_run: ${{ inputs.test_run }}
+      nightly_release: ${{ inputs.nightly_release }}
+    secrets: inherit

+  log-outputs-bump-version-generate-changelog:
+    name: "[Log output] Bump package version, Generate changelog"
+    if: ${{ !failure() && !cancelled() }}
+    needs: [bump-version-generate-changelog]
     runs-on: ubuntu-latest
     steps:
-      - name: Check out the repository
-        uses: actions/checkout@v2
-        with:
-          persist-credentials: false
-          ref: ${{ github.event.inputs.sha }}
-
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: 3.8
-
-      - name: Install python dependencies
+      - name: Print variables
         run: |
-          pip install --user --upgrade pip
-          pip install --upgrade setuptools wheel twine check-wheel-contents
-          pip --version
-
-      - name: Build distributions
-        run: ./scripts/build-dist.sh
-
-      - name: Show distributions
-        run: ls -lh dist/
-
-      - name: Check distribution descriptions
-        run: |
-          twine check dist/*
-
-      - name: Check wheel contents
-        run: |
-          check-wheel-contents dist/*.whl --ignore W007,W008
-
-      - uses: actions/upload-artifact@v2
-        with:
-          name: dist
-          path: |
-            dist/
-            !dist/dbt-${{github.event.inputs.version_number}}.tar.gz
-
-  test-build:
-    name: verify packages
-    needs: [build, unit]
-    runs-on: ubuntu-latest
-    steps:
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: 3.8
-
-      - name: Install python dependencies
-        run: |
-          pip install --user --upgrade pip
-          pip install --upgrade wheel
-          pip --version
-
-      - uses: actions/download-artifact@v2
-        with:
-          name: dist
-          path: dist/
-
-      - name: Show distributions
-        run: ls -lh dist/
-
-      - name: Install wheel distributions
-        run: |
-          find ./dist/*.whl -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
-
-      - name: Check wheel distributions
-        run: |
-          dbt --version
-
-      - name: Install source distributions
-        run: |
-          find ./dist/*.gz -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
-
-      - name: Check source distributions
-        run: |
-          dbt --version
+          echo Final SHA : ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
+          echo Changelog path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}

+  build-test-package:
+    name: Build, Test, Package
+    if: ${{ !failure() && !cancelled() }}
+    needs: [bump-version-generate-changelog]
+    uses: dbt-labs/dbt-release/.github/workflows/build.yml@main
+    with:
+      sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
+      version_number: ${{ inputs.version_number }}
+      changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
+      build_script_path: ${{ inputs.build_script_path }}
+      s3_bucket_name: ${{ inputs.s3_bucket_name }}
+      package_test_command: ${{ inputs.package_test_command }}
+      test_run: ${{ inputs.test_run }}
+      nightly_release: ${{ inputs.nightly_release }}
+    secrets:
+      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

   github-release:
     name: GitHub Release
-    needs: test-build
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/download-artifact@v2
-        with:
-          name: dist
-          path: '.'
-
-      # Need to set an output variable because env variables can't be taken as input
-      # This is needed for the next step with releasing to GitHub
-      - name: Find release type
-        id: release_type
-        env:
-          IS_PRERELEASE: ${{ contains(github.event.inputs.version_number, 'rc') || contains(github.event.inputs.version_number, 'b') }}
-        run: |
-          echo "isPrerelease=$IS_PRERELEASE" >> $GITHUB_OUTPUT
-
-      - name: Creating GitHub Release
-        uses: softprops/action-gh-release@v1
-        with:
-          name: dbt-core v${{github.event.inputs.version_number}}
-          tag_name: v${{github.event.inputs.version_number}}
-          prerelease: ${{ steps.release_type.outputs.isPrerelease }}
-          target_commitish: ${{github.event.inputs.sha}}
-          body: |
-            [Release notes](https://github.com/dbt-labs/dbt-core/blob/main/CHANGELOG.md)
-          files: |
-            dbt_postgres-${{github.event.inputs.version_number}}-py3-none-any.whl
-            dbt_core-${{github.event.inputs.version_number}}-py3-none-any.whl
-            dbt-postgres-${{github.event.inputs.version_number}}.tar.gz
-            dbt-core-${{github.event.inputs.version_number}}.tar.gz
+    if: ${{ !failure() && !cancelled() }}
+    needs: [bump-version-generate-changelog, build-test-package]
+    uses: dbt-labs/dbt-release/.github/workflows/github-release.yml@main
+    with:
+      sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
+      version_number: ${{ inputs.version_number }}
+      changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
+      test_run: ${{ inputs.test_run }}

   pypi-release:
-    name: Pypi release
-    runs-on: ubuntu-latest
-    needs: github-release
-    environment: PypiProd
-    steps:
-      - uses: actions/download-artifact@v2
-        with:
-          name: dist
-          path: 'dist'
-
-      - name: Publish distribution to PyPI
-        uses: pypa/gh-action-pypi-publish@v1.4.2
-        with:
-          password: ${{ secrets.PYPI_API_TOKEN }}
+    name: PyPI Release
+    needs: [github-release]
+    uses: dbt-labs/dbt-release/.github/workflows/pypi-release.yml@main
+    with:
+      version_number: ${{ inputs.version_number }}
+      test_run: ${{ inputs.test_run }}
+    secrets:
+      PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
+      TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }}

+  slack-notification:
+    name: Slack Notification
+    if: ${{ failure() && (!inputs.test_run || inputs.nightly_release) }}
+    needs:
+      [
+        bump-version-generate-changelog,
+        build-test-package,
+        github-release,
+        pypi-release,
+      ]
+    uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main
+    with:
+      status: "failure"
+    secrets:
+      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}
@@ -30,6 +30,8 @@ jobs:
       LOG_DIR: "/home/runner/work/dbt-core/dbt-core/logs"
       # tells integration tests to output into json format
       DBT_LOG_FORMAT: "json"
+      # tell eventmgr to convert logging events into bytes
+      DBT_TEST_BINARY_SERIALIZATION: "true"
       # Additional test users
       DBT_TEST_USER_1: dbt_test_user_1
       DBT_TEST_USER_2: dbt_test_user_2
107  .github/workflows/version-bump.yml  vendored
@@ -20,106 +20,9 @@ on:
       description: 'The version number to bump to (ex. 1.2.0, 1.3.0b1)'
       required: true

-permissions:
-  contents: write
-  pull-requests: write
-
 jobs:
-  bump:
-    runs-on: ubuntu-latest
-    steps:
-      - name: "[DEBUG] Print Variables"
-        run: |
-          echo "all variables defined as inputs"
-          echo The version_number: ${{ github.event.inputs.version_number }}
-
-      - name: Check out the repository
-        uses: actions/checkout@v2
-
-      - uses: actions/setup-python@v2
-        with:
-          python-version: "3.8"
-
-      - name: Install python dependencies
-        run: |
-          python3 -m venv env
-          source env/bin/activate
-          pip install --upgrade pip
-
-      - name: Add Homebrew to PATH
-        run: |
-          echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
-
-      - name: Install Homebrew packages
-        run: |
-          brew install pre-commit
-          brew tap miniscruff/changie https://github.com/miniscruff/changie
-          brew install changie
-
-      - name: Audit Version and Parse Into Parts
-        id: semver
-        uses: dbt-labs/actions/parse-semver@v1
-        with:
-          version: ${{ github.event.inputs.version_number }}
-
-      - name: Set branch value
-        id: variables
-        run: |
-          echo "BRANCH_NAME=prep-release/${{ github.event.inputs.version_number }}_$GITHUB_RUN_ID" >> $GITHUB_OUTPUT
-
-      - name: Create PR branch
-        run: |
-          git checkout -b ${{ steps.variables.outputs.BRANCH_NAME }}
-          git push origin ${{ steps.variables.outputs.BRANCH_NAME }}
-          git branch --set-upstream-to=origin/${{ steps.variables.outputs.BRANCH_NAME }} ${{ steps.variables.outputs.BRANCH_NAME }}
-
-      - name: Bump version
-        run: |
-          source env/bin/activate
-          pip install -r dev-requirements.txt
-          env/bin/bumpversion --allow-dirty --new-version ${{ github.event.inputs.version_number }} major
-          git status
-
-      - name: Run changie
-        run: |
-          if [[ ${{ steps.semver.outputs.is-pre-release }} -eq 1 ]]
-          then
-            changie batch ${{ steps.semver.outputs.base-version }} --move-dir '${{ steps.semver.outputs.base-version }}' --prerelease '${{ steps.semver.outputs.pre-release }}'
-          else
-            changie batch ${{ steps.semver.outputs.base-version }} --include '${{ steps.semver.outputs.base-version }}' --remove-prereleases
-          fi
-          changie merge
-          git status
-
-      # this step will fail on whitespace errors but also correct them
-      - name: Remove trailing whitespace
-        continue-on-error: true
-        run: |
-          pre-commit run trailing-whitespace --files .bumpversion.cfg CHANGELOG.md .changes/*
-          git status
-
-      # this step will fail on newline errors but also correct them
-      - name: Removing extra newlines
-        continue-on-error: true
-        run: |
-          pre-commit run end-of-file-fixer --files .bumpversion.cfg CHANGELOG.md .changes/*
-          git status
-
-      - name: Commit version bump to branch
-        uses: EndBug/add-and-commit@v7
-        with:
-          author_name: 'Github Build Bot'
-          author_email: 'buildbot@fishtownanalytics.com'
-          message: 'Bumping version to ${{ github.event.inputs.version_number }} and generate CHANGELOG'
-          branch: '${{ steps.variables.outputs.BRANCH_NAME }}'
-          push: 'origin origin/${{ steps.variables.outputs.BRANCH_NAME }}'
-
-      - name: Create Pull Request
-        uses: peter-evans/create-pull-request@v3
-        with:
-          author: 'Github Build Bot <buildbot@fishtownanalytics.com>'
-          base: ${{github.ref}}
-          title: 'Bumping version to ${{ github.event.inputs.version_number }} and generate changelog'
-          branch: '${{ steps.variables.outputs.BRANCH_NAME }}'
-          labels: |
-            Skip Changelog
+  version_bump_and_changie:
+    uses: dbt-labs/actions/.github/workflows/version-bump.yml@main
+    with:
+      version_number: ${{ inputs.version_number }}
+    secrets: inherit # ok since what we are calling is internally maintained
1  .gitignore  vendored
@@ -51,6 +51,7 @@ coverage.xml
 *,cover
 .hypothesis/
 test.env
+makefile.test.env
 *.pytest_cache/
82  CHANGELOG.md
@@ -5,6 +5,88 @@
 - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
 - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)

+## dbt-core 1.5.0-b1 - February 17, 2023
+
+### Features
+
+- Data type constraints are now native to SQL table materializations. Enforce columns are specific data types and not null depending on database functionality. ([#6079](https://github.com/dbt-labs/dbt-core/issues/6079))
+- Have dbt debug spit out structured json logs with flags enabled. ([#5353](https://github.com/dbt-labs/dbt-core/issues/5353))
+- add adapter_response to dbt test and freshness result ([#2964](https://github.com/dbt-labs/dbt-core/issues/2964))
+- Improve error message for packages missing `dbt_project.yml` ([#6663](https://github.com/dbt-labs/dbt-core/issues/6663))
+- Adjust makefile to have clearer instructions for CI env var changes. ([#6689](https://github.com/dbt-labs/dbt-core/issues/6689))
+- Stand-alone Python module for PostgresColumn ([#6772](https://github.com/dbt-labs/dbt-core/issues/6772))
+- Exposure owner requires one of name or email keys, and accepts additional arbitrary keys ([#6833](https://github.com/dbt-labs/dbt-core/issues/6833))
+- Parse 'group' resource ([#6921](https://github.com/dbt-labs/dbt-core/issues/6921))
+
+### Fixes
+
+- add merge_exclude_columns adapter tests ([#6699](https://github.com/dbt-labs/dbt-core/issues/6699))
+- Include adapter_response in NodeFinished run_result log event ([#6703](https://github.com/dbt-labs/dbt-core/issues/6703))
+- Sort cli vars before hashing for partial parsing ([#6710](https://github.com/dbt-labs/dbt-core/issues/6710))
+- [Regression] exposure_content referenced incorrectly ([#6738](https://github.com/dbt-labs/dbt-core/issues/6738))
+- Remove pin on packaging and stop using it for prerelease comparisons ([#6834](https://github.com/dbt-labs/dbt-core/issues/6834))
+- Readd depends_on.macros to SeedNode, to support seeds with hooks calling macros ([#6806](https://github.com/dbt-labs/dbt-core/issues/6806))
+- Fix regression of --quiet cli parameter behavior ([#6749](https://github.com/dbt-labs/dbt-core/issues/6749))
+- Ensure results from hooks contain nodes when processing them ([#6796](https://github.com/dbt-labs/dbt-core/issues/6796))
+- Always flush stdout after logging ([#6901](https://github.com/dbt-labs/dbt-core/issues/6901))
+- Reapply logging fixes which were accidentally reverted ([#6936](https://github.com/dbt-labs/dbt-core/issues/6936))
+- Set relation_name in test nodes at compile time ([#6930](https://github.com/dbt-labs/dbt-core/issues/6930))
+- Readd initialization events, --log-cache-events in new CLI ([#6933](https://github.com/dbt-labs/dbt-core/issues/6933))
+- Fix previous state tests and disabled exposures, metrics ([#6752](https://github.com/dbt-labs/dbt-core/issues/6752), [#6753](https://github.com/dbt-labs/dbt-core/issues/6753))
+- Make use of hashlib.md5() FIPS compliant ([#6900](https://github.com/dbt-labs/dbt-core/issues/6900))
+
+### Docs
+
+- update link to installation instructions ([dbt-docs/#None](https://github.com/dbt-labs/dbt-docs/issues/None))
+- Fix JSON path to overview docs ([dbt-docs/#366](https://github.com/dbt-labs/dbt-docs/issues/366))
+- Searchable column descriptions ([dbt-docs/#140](https://github.com/dbt-labs/dbt-docs/issues/140), [dbt-docs/#322](https://github.com/dbt-labs/dbt-docs/issues/322), [dbt-docs/#369](https://github.com/dbt-labs/dbt-docs/issues/369))
+
+### Under the Hood
+
+- [CT-921] dbt compile works in click ([#5545](https://github.com/dbt-labs/dbt-core/issues/5545))
+- Fix use of ConnectionReused logging event ([#6168](https://github.com/dbt-labs/dbt-core/issues/6168))
+- Port docs tests to pytest ([#6573](https://github.com/dbt-labs/dbt-core/issues/6573))
+- Update deprecated github action command ([#6153](https://github.com/dbt-labs/dbt-core/issues/6153))
+- dbt snapshot works in click ([#5554](https://github.com/dbt-labs/dbt-core/issues/5554))
+- dbt list working with click ([#5549](https://github.com/dbt-labs/dbt-core/issues/5549))
+- Add dbt run-operation to click CLI ([#5552](https://github.com/dbt-labs/dbt-core/issues/5552))
+- dbt build working with new click framework ([#5541](https://github.com/dbt-labs/dbt-core/issues/5541))
+- dbt docs generate works with new click framework ([#5543](https://github.com/dbt-labs/dbt-core/issues/5543))
+- Replaced the EmptyLine event with a more general Formatting event, and added a Note event. ([#6481](https://github.com/dbt-labs/dbt-core/issues/6481))
+- Small optimization on manifest parsing benefitting large DAGs ([#6697](https://github.com/dbt-labs/dbt-core/issues/6697))
+- Revised and simplified various structured logging events ([#6664](https://github.com/dbt-labs/dbt-core/issues/6664), [#6665](https://github.com/dbt-labs/dbt-core/issues/6665), [#6666](https://github.com/dbt-labs/dbt-core/issues/6666))
+- dbt init works with click ([#5548](https://github.com/dbt-labs/dbt-core/issues/5548))
+- [CT-920][CT-1900] Create Click CLI runner and use it to fix dbt docs commands ([#5544](https://github.com/dbt-labs/dbt-core/issues/5544), [#6722](https://github.com/dbt-labs/dbt-core/issues/6722))
+- Migrate debug task to click ([#5546](https://github.com/dbt-labs/dbt-core/issues/5546))
+- Optimized GraphQueue to remove graph analysis bottleneck in large dags. ([#6759](https://github.com/dbt-labs/dbt-core/issues/6759))
+- Implement --version for click cli ([#6757](https://github.com/dbt-labs/dbt-core/issues/6757))
+- [CT-1841] Convert custom target test to Pytest ([#6638](https://github.com/dbt-labs/dbt-core/issues/6638))
+- Remove BigQuery-specific byte abbreviations ([#6741](https://github.com/dbt-labs/dbt-core/issues/6741))
+- warn_error/warn_error_options mutual exclusivity in click ([#6579](https://github.com/dbt-labs/dbt-core/issues/6579))
+- Enables the new Click Cli on the commandline! 🚀 ([#6784](https://github.com/dbt-labs/dbt-core/issues/6784))
+- Lazily call --version ([#6812](https://github.com/dbt-labs/dbt-core/issues/6812))
+- Moving simple_seed to adapter zone to help adapter test conversions ([#CT-1959](https://github.com/dbt-labs/dbt-core/issues/CT-1959))
+- flags.THREADS defaults to None ([#6887](https://github.com/dbt-labs/dbt-core/issues/6887))
+- Fixing target type exposure error ([#6928](https://github.com/dbt-labs/dbt-core/issues/6928))
+- Test binary serialization of logging events ([#6852](https://github.com/dbt-labs/dbt-core/issues/6852))
+
+### Dependencies
+
+- Bump ubuntu from 22.04 to 23.04 ([#6865](https://github.com/dbt-labs/dbt-core/pull/6865))
+- Revert hoisting dbt.cli.main into the dbt.name namespace ([#](https://github.com/dbt-labs/dbt-core/pull/))
+
+### Contributors
+- [@aezomz](https://github.com/aezomz) ([#2964](https://github.com/dbt-labs/dbt-core/issues/2964))
+- [@boxysean](https://github.com/boxysean) ([#6697](https://github.com/dbt-labs/dbt-core/issues/6697))
+- [@callum-mcdata](https://github.com/callum-mcdata) ([#6928](https://github.com/dbt-labs/dbt-core/issues/6928))
+- [@dave-connors-3](https://github.com/dave-connors-3) ([#6699](https://github.com/dbt-labs/dbt-core/issues/6699))
+- [@davidbloss](https://github.com/davidbloss) ([#6153](https://github.com/dbt-labs/dbt-core/issues/6153))
+- [@halvorlu](https://github.com/halvorlu) ([#366](https://github.com/dbt-labs/dbt-core/issues/366))
+- [@nielspardon](https://github.com/nielspardon) ([#6900](https://github.com/dbt-labs/dbt-core/issues/6900))
+- [@ryancharris](https://github.com/ryancharris) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
+- [@sungchun12](https://github.com/sungchun12) ([#6079](https://github.com/dbt-labs/dbt-core/issues/6079))
+
+
 ## Previous Releases

 For information on prior major and minor releases, see their changelogs:
@@ -3,7 +3,7 @@
 # See `/docker` for a generic and production-ready docker file
 ##

-FROM ubuntu:22.04
+FROM ubuntu:23.04

 ENV DEBIAN_FRONTEND noninteractive
34  Makefile
@@ -6,18 +6,26 @@ ifeq ($(USE_DOCKER),true)
 DOCKER_CMD := docker-compose run --rm test
 endif

-LOGS_DIR := ./logs
+#
+# To override CI_flags, create a file at this repo's root dir named `makefile.test.env`. Fill it
+# with any ENV_VAR overrides required by your test environment, e.g.
+# DBT_TEST_USER_1=user
+# LOG_DIR="dir with a space in it"
+#
+# Warn: Restrict each line to one variable only.
+#
+ifeq (./makefile.test.env,$(wildcard ./makefile.test.env))
+  include ./makefile.test.env
+endif

-# Optional flag to invoke tests using our CI env.
-# But we always want these active for structured
-# log testing.
 CI_FLAGS =\
-	DBT_TEST_USER_1=dbt_test_user_1\
-	DBT_TEST_USER_2=dbt_test_user_2\
-	DBT_TEST_USER_3=dbt_test_user_3\
-	RUSTFLAGS="-D warnings"\
-	LOG_DIR=./logs\
-	DBT_LOG_FORMAT=json
+	DBT_TEST_USER_1=$(if $(DBT_TEST_USER_1),$(DBT_TEST_USER_1),dbt_test_user_1)\
+	DBT_TEST_USER_2=$(if $(DBT_TEST_USER_2),$(DBT_TEST_USER_2),dbt_test_user_2)\
+	DBT_TEST_USER_3=$(if $(DBT_TEST_USER_3),$(DBT_TEST_USER_3),dbt_test_user_3)\
+	RUSTFLAGS=$(if $(RUSTFLAGS),$(RUSTFLAGS),"-D warnings")\
+	LOG_DIR=$(if $(LOG_DIR),$(LOG_DIR),./logs)\
+	DBT_LOG_FORMAT=$(if $(DBT_LOG_FORMAT),$(DBT_LOG_FORMAT),json)

 .PHONY: dev_req
 dev_req: ## Installs dbt-* packages in develop mode along with only development dependencies.
@@ -66,7 +74,7 @@ test: .env ## Runs unit tests with py and code checks against staged changes.
 .PHONY: integration
 integration: .env ## Runs postgres integration tests with py-integration
	@\
-	$(if $(USE_CI_FLAGS), $(CI_FLAGS)) $(DOCKER_CMD) tox -e py-integration -- -nauto
+	$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto

 .PHONY: integration-fail-fast
 integration-fail-fast: .env ## Runs postgres integration tests with py-integration in "fail fast" mode.
@@ -76,9 +84,9 @@ integration-fail-fast: .env ## Runs postgres integration tests with py-integration
 .PHONY: interop
 interop: clean
	@\
-	mkdir $(LOGS_DIR) && \
+	mkdir $(LOG_DIR) && \
	$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto && \
-	LOG_DIR=$(LOGS_DIR) cargo run --manifest-path test/interop/log_parsing/Cargo.toml
+	LOG_DIR=$(LOG_DIR) cargo run --manifest-path test/interop/log_parsing/Cargo.toml

 .PHONY: setup-db
 setup-db: ## Setup Postgres database with docker-compose for system testing.
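For illustration, a hypothetical makefile.test.env might look like the following; both values are invented, and each assignment overrides the matching default in CI_FLAGS through the $(if ...) fallbacks added above, exactly as the new comment block describes:

    # makefile.test.env (hypothetical overrides; one variable per line)
    DBT_TEST_USER_1=my_local_test_user
    LOG_DIR=./artifacts/logs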
@@ -21,7 +21,7 @@ These select statements, or "models", form a dbt project. Models frequently build
 ## Getting started

-- [Install dbt](https://docs.getdbt.com/docs/installation)
+- [Install dbt](https://docs.getdbt.com/docs/get-started/installation)
 - Read the [introduction](https://docs.getdbt.com/docs/introduction/) and [viewpoint](https://docs.getdbt.com/docs/about/viewpoint/)

 ## Join the dbt Community
@@ -17,7 +17,6 @@ from typing import (
     Iterator,
     Set,
 )

 import agate
 import pytz

@@ -54,7 +53,7 @@ from dbt.events.types import (
     CodeExecutionStatus,
     CatalogGenerationError,
 )
-from dbt.utils import filter_null_values, executor, cast_to_str
+from dbt.utils import filter_null_values, executor, cast_to_str, AttrDict

 from dbt.adapters.base.connections import Connection, AdapterResponse
 from dbt.adapters.base.meta import AdapterMeta, available
@@ -943,7 +942,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         context_override: Optional[Dict[str, Any]] = None,
         kwargs: Dict[str, Any] = None,
         text_only_columns: Optional[Iterable[str]] = None,
-    ) -> agate.Table:
+    ) -> AttrDict:
         """Look macro_name up in the manifest and execute its results.

         :param macro_name: The name of the macro to execute.
@@ -1028,7 +1027,7 @@ class BaseAdapter(metaclass=AdapterMeta):
             manifest=manifest,
         )

-        results = self._catalog_filter_table(table, manifest)
+        results = self._catalog_filter_table(table, manifest)  # type: ignore[arg-type]
         return results

     def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]:
@@ -1060,7 +1059,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         loaded_at_field: str,
         filter: Optional[str],
         manifest: Optional[Manifest] = None,
-    ) -> Dict[str, Any]:
+    ) -> Tuple[AdapterResponse, Dict[str, Any]]:
         """Calculate the freshness of sources in dbt, and return it"""
         kwargs: Dict[str, Any] = {
             "source": source,
@@ -1069,7 +1068,8 @@ class BaseAdapter(metaclass=AdapterMeta):
         }

         # run the macro
-        table = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest)
+        result = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest)
+        adapter_response, table = result.response, result.table  # type: ignore[attr-defined]
         # now we have a 1-row table of the maximum `loaded_at_field` value and
         # the current time according to the db.
         if len(table) != 1 or len(table[0]) != 2:
@@ -1083,11 +1083,12 @@ class BaseAdapter(metaclass=AdapterMeta):

         snapshotted_at = _utc(table[0][1], source, loaded_at_field)
         age = (snapshotted_at - max_loaded_at).total_seconds()
-        return {
+        freshness = {
             "max_loaded_at": max_loaded_at,
             "snapshotted_at": snapshotted_at,
             "age": age,
         }
+        return adapter_response, freshness

     def pre_model_hook(self, config: Mapping[str, Any]) -> Any:
         """A hook for running some operation before the model materialization
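To make the caller-facing effect of this signature change concrete, a minimal sketch follows; `adapter`, `source_relation`, and `manifest` are assumed stand-ins, and only the `(AdapterResponse, dict)` return shape comes from the diff above:

    # Sketch only: `adapter`, `source_relation`, and `manifest` are assumptions.
    adapter_response, freshness = adapter.calculate_freshness(
        source_relation,
        loaded_at_field="loaded_at",
        filter=None,
        manifest=manifest,
    )
    print(freshness["age"])       # seconds between snapshotted_at and max_loaded_at
    print(adapter_response)       # adapter metadata now travels with the result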
@@ -7,9 +7,9 @@ from dbt.adapters.protocol import AdapterProtocol

 def project_name_from_path(include_path: str) -> str:
     # avoid an import cycle
-    from dbt.config.project import Project
+    from dbt.config.project import PartialProject

-    partial = Project.partial_load(include_path)
+    partial = PartialProject.from_project_root(include_path)
     if partial.project_name is None:
         raise CompilationError(f"Invalid project at {include_path}: name not set!")
     return partial.project_name
@@ -17,7 +17,7 @@ from dbt.exceptions import (
 )
 from dbt.events.functions import fire_event, fire_event_if
 from dbt.events.types import CacheAction, CacheDumpGraph
-import dbt.flags as flags
+from dbt.flags import get_flags
 from dbt.utils import lowercase


@@ -319,6 +319,7 @@ class RelationsCache:

         :param BaseRelation relation: The underlying relation.
         """
+        flags = get_flags()
         cached = _CachedRelation(relation)
         fire_event_if(
             flags.LOG_CACHE_EVENTS,
@@ -456,7 +457,7 @@ class RelationsCache:
                 ref_key_2=_make_msg_from_ref_key(new),
             )
         )
-
+        flags = get_flags()
         fire_event_if(
             flags.LOG_CACHE_EVENTS,
             lambda: CacheDumpGraph(before_after="before", action="rename", dump=self.dump_graph()),
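As a minimal sketch of the accessor pattern this hunk moves to: the helper below is illustrative (not code from the diff), but the `get_flags` import and the `LOG_CACHE_EVENTS` attribute are taken from it.

    from dbt.flags import get_flags

    def dump_cache_if_enabled(dump):
        # Read the process-wide flags at call time rather than binding the
        # old `dbt.flags` module attributes at import time.
        flags = get_flags()
        if flags.LOG_CACHE_EVENTS:
            dump()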
@@ -0,0 +1 @@
+from .main import cli as dbt_cli  # noqa
16  core/dbt/cli/context.py  Normal file
@@ -0,0 +1,16 @@
+import click
+from typing import Optional
+
+from dbt.cli.main import cli as dbt
+
+
+def make_context(args, command=dbt) -> Optional[click.Context]:
+    try:
+        ctx = command.make_context(command.name, args)
+    except click.exceptions.Exit:
+        return None
+
+    ctx.invoked_subcommand = ctx.protected_args[0] if ctx.protected_args else None
+    ctx.obj = {}
+
+    return ctx
20  core/dbt/cli/example.py  Normal file
@@ -0,0 +1,20 @@
+from dbt.cli.main import dbtRunner
+from dbt.config.runtime import load_profile, load_project
+
+if __name__ == "__main__":
+    project_dir = "/Users/chenyuli/git/jaffle_shop"
+    cli_args = ["run", "--project-dir", project_dir]
+
+    # initialize the dbt runner
+    dbt = dbtRunner()
+    # run the command
+    res, success = dbt.invoke(cli_args)
+
+    # preload profile and project
+    profile = load_profile(project_dir, {}, "testing-postgres")
+    project = load_project(project_dir, False, profile, {})
+
+    # initialize the runner with pre-loaded profile and project, you can also pass in a preloaded manifest
+    dbt = dbtRunner(profile=profile, project=project)
+    # run the command, this will use the pre-loaded profile and project instead of loading
+    res, success = dbt.invoke(cli_args)
@@ -1,44 +1,193 @@
 # TODO Move this to /core/dbt/flags.py when we're ready to break things
 import os
+import sys
 from dataclasses import dataclass
+from importlib import import_module
 from multiprocessing import get_context
 from pprint import pformat as pf
+from typing import Set, List
+
+from click import Context, get_current_context, BadOptionUsage
+from click.core import ParameterSource, Command, Group
+
+from dbt.config.profile import read_user_config
+from dbt.contracts.project import UserConfig
+from dbt.helper_types import WarnErrorOptions
+from dbt.cli.resolvers import default_project_dir, default_log_path

-from click import get_current_context

 if os.name != "nt":
     # https://bugs.python.org/issue41567
     import multiprocessing.popen_spawn_posix  # type: ignore # noqa: F401

+# TODO anything that has a default in params should be removed here?
+# Or maybe only the ones that's in the root click group
+FLAGS_DEFAULTS = {
+    "INDIRECT_SELECTION": "eager",
+    "TARGET_PATH": None,
+    # cli args without user_config or env var option
+    "FULL_REFRESH": False,
+    "STRICT_MODE": False,
+    "STORE_FAILURES": False,
+}
+
+
+# For backwards compatibility, some params are defined across multiple levels,
+# Top-level value should take precedence.
+# e.g. dbt --target-path test2 run --target-path test2
+EXPECTED_DUPLICATE_PARAMS = [
+    "full_refresh",
+    "target_path",
+    "version_check",
+    "fail_fast",
+    "indirect_selection",
+    "store_failures",
+]
+
+
+def convert_config(config_name, config_value):
+    # This function should take care of converting the values from config and original
+    # set_from_args to the correct type
+    ret = config_value
+    if config_name.lower() == "warn_error_options" and type(config_value) == dict:
+        ret = WarnErrorOptions(
+            include=config_value.get("include", []), exclude=config_value.get("exclude", [])
+        )
+    return ret
+
+
+def args_to_context(args: List[str]) -> Context:
+    """Convert a list of args to a click context with proper hierarchy for dbt commands"""
+    from dbt.cli.main import cli
+
+    cli_ctx = cli.make_context(cli.name, args)
+    # args would get converted during make context
+    if len(args) == 1 and "," in args[0]:
+        args = args[0].split(",")
+    sub_command_name, sub_command, args = cli.resolve_command(cli_ctx, args)
+
+    # handle source and docs group
+    if type(sub_command) == Group:
+        sub_command_name, sub_command, args = sub_command.resolve_command(cli_ctx, args)
+
+    assert type(sub_command) == Command
+    sub_command_ctx = sub_command.make_context(sub_command_name, args)
+    sub_command_ctx.parent = cli_ctx
+    return sub_command_ctx
+

 @dataclass(frozen=True)
 class Flags:
-    def __init__(self, ctx=None) -> None:
+    def __init__(self, ctx: Context = None, user_config: UserConfig = None) -> None:
+
+        # set the default flags
+        for key, value in FLAGS_DEFAULTS.items():
+            object.__setattr__(self, key, value)

         if ctx is None:
             ctx = get_current_context()

-        def assign_params(ctx):
+        def assign_params(ctx, params_assigned_from_default):
             """Recursively adds all click params to flag object"""
             for param_name, param_value in ctx.params.items():
+                # TODO: this is to avoid duplicate params being defined in two places (version_check in run and cli)
+                # However this is a bit of a hack and we should find a better way to do this
+
                 # N.B. You have to use the base MRO method (object.__setattr__) to set attributes
                 # when using frozen dataclasses.
                 # https://docs.python.org/3/library/dataclasses.html#frozen-instances
-                if hasattr(self, param_name):
-                    raise Exception(f"Duplicate flag names found in click command: {param_name}")
-                object.__setattr__(self, param_name.upper(), param_value)
-            if ctx.parent:
-                assign_params(ctx.parent)
+                if hasattr(self, param_name.upper()):
+                    if param_name not in EXPECTED_DUPLICATE_PARAMS:
+                        raise Exception(
+                            f"Duplicate flag names found in click command: {param_name}"
+                        )
+                    else:
+                        # Expected duplicate param from multi-level click command (ex: dbt --full_refresh run --full_refresh)
+                        # Overwrite user-configured param with value from parent context
+                        if ctx.get_parameter_source(param_name) != ParameterSource.DEFAULT:
+                            object.__setattr__(self, param_name.upper(), param_value)
+                else:
+                    object.__setattr__(self, param_name.upper(), param_value)
+                    if ctx.get_parameter_source(param_name) == ParameterSource.DEFAULT:
+                        params_assigned_from_default.add(param_name)

-        assign_params(ctx)
+            if ctx.parent:
+                assign_params(ctx.parent, params_assigned_from_default)
+
+        params_assigned_from_default = set()  # type: Set[str]
+        assign_params(ctx, params_assigned_from_default)
+
+        # Get the invoked command flags
+        invoked_subcommand_name = (
+            ctx.invoked_subcommand if hasattr(ctx, "invoked_subcommand") else None
+        )
+        if invoked_subcommand_name is not None:
+            invoked_subcommand = getattr(import_module("dbt.cli.main"), invoked_subcommand_name)
+            invoked_subcommand.allow_extra_args = True
+            invoked_subcommand.ignore_unknown_options = True
+            invoked_subcommand_ctx = invoked_subcommand.make_context(None, sys.argv)
+            assign_params(invoked_subcommand_ctx, params_assigned_from_default)
+
+        if not user_config:
+            profiles_dir = getattr(self, "PROFILES_DIR", None)
+            user_config = read_user_config(profiles_dir) if profiles_dir else None
+
+        # Overwrite default assignments with user config if available
+        if user_config:
+            param_assigned_from_default_copy = params_assigned_from_default.copy()
+            for param_assigned_from_default in params_assigned_from_default:
+                user_config_param_value = getattr(user_config, param_assigned_from_default, None)
+                if user_config_param_value is not None:
+                    object.__setattr__(
+                        self,
+                        param_assigned_from_default.upper(),
+                        convert_config(param_assigned_from_default, user_config_param_value),
+                    )
+                    param_assigned_from_default_copy.remove(param_assigned_from_default)
+            params_assigned_from_default = param_assigned_from_default_copy

         # Hard coded flags
-        object.__setattr__(self, "WHICH", ctx.info_name)
+        object.__setattr__(self, "WHICH", invoked_subcommand_name or ctx.info_name)
         object.__setattr__(self, "MP_CONTEXT", get_context("spawn"))

+        # Default LOG_PATH from PROJECT_DIR, if available.
+        if getattr(self, "LOG_PATH", None) is None:
+            project_dir = getattr(self, "PROJECT_DIR", default_project_dir())
+            version_check = getattr(self, "VERSION_CHECK", True)
+            object.__setattr__(self, "LOG_PATH", default_log_path(project_dir, version_check))
+
         # Support console DO NOT TRACK initiative
-        if os.getenv("DO_NOT_TRACK", "").lower() in (1, "t", "true", "y", "yes"):
-            object.__setattr__(self, "ANONYMOUS_USAGE_STATS", False)
+        if os.getenv("DO_NOT_TRACK", "").lower() in ("1", "t", "true", "y", "yes"):
+            object.__setattr__(self, "SEND_ANONYMOUS_USAGE_STATS", False)
+
+        # Check mutual exclusivity once all flags are set
+        self._assert_mutually_exclusive(
+            params_assigned_from_default, ["WARN_ERROR", "WARN_ERROR_OPTIONS"]
+        )
+
+        # Support lower cased access for legacy code
+        params = set(
+            x for x in dir(self) if not callable(getattr(self, x)) and not x.startswith("__")
+        )
+        for param in params:
+            object.__setattr__(self, param.lower(), getattr(self, param))

     def __str__(self) -> str:
         return str(pf(self.__dict__))
+
+    def _assert_mutually_exclusive(
+        self, params_assigned_from_default: Set[str], group: List[str]
+    ) -> None:
+        """
+        Ensure no elements from group are simultaneously provided by a user, as inferred from params_assigned_from_default.
+        Raises click.UsageError if any two elements from group are simultaneously provided by a user.
+        """
+        set_flag = None
+        for flag in group:
+            flag_set_by_user = flag.lower() not in params_assigned_from_default
+            if flag_set_by_user and set_flag:
+                raise BadOptionUsage(
+                    flag.lower(), f"{flag.lower()}: not allowed with argument {set_flag.lower()}"
+                )
+            elif flag_set_by_user:
+                set_flag = flag
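As a usage sketch, the helpers above can build a Flags object outside of a live click invocation; the argv list here is illustrative, but `args_to_context` and the `Flags(ctx, user_config)` constructor are both defined in the diff:

    from dbt.cli.flags import Flags, args_to_context

    ctx = args_to_context(["run", "--project-dir", "."])
    flags = Flags(ctx)   # with no user_config passed, read_user_config(PROFILES_DIR) is consulted
    print(flags.WHICH)   # "run"; lowercase aliases (flags.which) also work for legacy code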
@@ -1,22 +1,71 @@
-import inspect  # This is temporary for RAT-ing
 from copy import copy
-from pprint import pformat as pf  # This is temporary for RAT-ing
+from typing import List, Tuple, Optional

 import click
-from dbt.adapters.factory import adapter_management
-from dbt.cli import params as p
-from dbt.cli.flags import Flags
-from dbt.profiler import profiler
+from dbt.cli import requires, params as p
+from dbt.config.project import Project
+from dbt.config.profile import Profile
+from dbt.contracts.graph.manifest import Manifest
+from dbt.task.clean import CleanTask
+from dbt.task.compile import CompileTask
+from dbt.task.deps import DepsTask
+from dbt.task.debug import DebugTask
+from dbt.task.run import RunTask
+from dbt.task.serve import ServeTask
+from dbt.task.test import TestTask
+from dbt.task.snapshot import SnapshotTask
+from dbt.task.seed import SeedTask
+from dbt.task.list import ListTask
+from dbt.task.freshness import FreshnessTask
+from dbt.task.run_operation import RunOperationTask
+from dbt.task.build import BuildTask
+from dbt.task.generate import GenerateTask
+from dbt.task.init import InitTask


-def cli_runner():
-    # Alias "list" to "ls"
-    ls = copy(cli.commands["list"])
-    ls.hidden = True
-    cli.add_command(ls, "ls")
-
-    # Run the cli
-    cli()
+class dbtUsageException(Exception):
+    pass
+
+
+class dbtInternalException(Exception):
+    pass
+
+
+# Programmatic invocation
+class dbtRunner:
+    def __init__(
+        self, project: Project = None, profile: Profile = None, manifest: Manifest = None
+    ):
+        self.project = project
+        self.profile = profile
+        self.manifest = manifest
+
+    def invoke(self, args: List[str]) -> Tuple[Optional[List], bool]:
+        try:
+            dbt_ctx = cli.make_context(cli.name, args)
+            dbt_ctx.obj = {
+                "project": self.project,
+                "profile": self.profile,
+                "manifest": self.manifest,
+            }
+            return cli.invoke(dbt_ctx)
+        except click.exceptions.Exit as e:
+            # 0 exit code, expected for --version early exit
+            if str(e) == "0":
+                return [], True
+            raise dbtInternalException(f"unhandled exit code {str(e)}")
+        except (click.NoSuchOption, click.UsageError) as e:
+            raise dbtUsageException(e.message)
+
+
+def handle(args):
+    res, _ = handle_and_check(args)
+    return res
+
+
+def handle_and_check(args):
+    dbt = dbtRunner()
+    return dbt.invoke(args)


 # dbt
@@ -27,19 +76,21 @@ def cli_runner():
     epilog="Specify one of these sub-commands and you can find more help from there.",
 )
 @click.pass_context
-@p.anonymous_usage_stats
+@p.send_anonymous_usage_stats
 @p.cache_selected_only
 @p.debug
 @p.enable_legacy_logger
 @p.fail_fast
 @p.log_cache_events
 @p.log_format
+@p.log_path
 @p.macro_debugging
 @p.partial_parse
 @p.print
 @p.printer_width
 @p.quiet
 @p.record_timing_info
+@p.single_threaded
 @p.static_parser
 @p.use_colors
 @p.use_experimental_parser
@@ -52,21 +103,6 @@ def cli(ctx, **kwargs):
     """An ELT tool for managing your SQL transformations and data models.
     For more documentation on these commands, visit: docs.getdbt.com
     """
-    incomplete_flags = Flags()
-
-    # Profiling
-    if incomplete_flags.RECORD_TIMING_INFO:
-        ctx.with_resource(profiler(enable=True, outfile=incomplete_flags.RECORD_TIMING_INFO))
-
-    # Adapter management
-    ctx.with_resource(adapter_management())
-
-    # Version info
-    if incomplete_flags.VERSION:
-        click.echo(f"`version` called\n ctx.params: {pf(ctx.params)}")
-        return
-    else:
-        del ctx.params["version"]


 # dbt build
@@ -75,13 +111,14 @@ def cli(ctx, **kwargs):
 @p.defer
 @p.exclude
 @p.fail_fast
+@p.favor_state
 @p.full_refresh
 @p.indirect_selection
-@p.log_path
-@p.models
 @p.profile
 @p.profiles_dir
 @p.project_dir
+@p.resource_type
+@p.select
 @p.selector
 @p.show
 @p.state
@@ -91,10 +128,22 @@ def cli(ctx, **kwargs):
 @p.threads
 @p.vars
 @p.version_check
+@requires.preflight
+@requires.profile
+@requires.project
+@requires.runtime_config
+@requires.manifest
 def build(ctx, **kwargs):
     """Run all Seeds, Models, Snapshots, and tests in DAG order"""
-    flags = Flags()
-    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
+    task = BuildTask(
+        ctx.obj["flags"],
+        ctx.obj["runtime_config"],
+        ctx.obj["manifest"],
+    )
+
+    results = task.run()
+    success = task.interpret_results(results)
+    return results, success


 # dbt clean
@@ -105,10 +154,16 @@ def build(ctx, **kwargs):
 @p.project_dir
 @p.target
 @p.vars
+@requires.preflight
+@requires.unset_profile
+@requires.project
 def clean(ctx, **kwargs):
     """Delete all folders in the clean-targets list (usually the dbt_packages and target directories.)"""
-    flags = Flags()
-    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
+    task = CleanTask(ctx.obj["flags"], ctx.obj["project"])
+
+    results = task.run()
+    success = task.interpret_results(results)
+    return results, success


 # dbt docs
@@ -124,11 +179,11 @@ def docs(ctx, **kwargs):
 @p.compile_docs
 @p.defer
 @p.exclude
-@p.log_path
-@p.models
+@p.favor_state
 @p.profile
 @p.profiles_dir
 @p.project_dir
+@p.select
 @p.selector
 @p.state
 @p.target
@@ -136,10 +191,22 @@ def docs(ctx, **kwargs):
 @p.threads
 @p.vars
 @p.version_check
+@requires.preflight
+@requires.profile
+@requires.project
+@requires.runtime_config
+@requires.manifest(write=False)
 def docs_generate(ctx, **kwargs):
     """Generate the documentation website for your project"""
-    flags = Flags()
-    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
+    task = GenerateTask(
+        ctx.obj["flags"],
+        ctx.obj["runtime_config"],
+        ctx.obj["manifest"],
+    )
+
+    results = task.run()
+    success = task.interpret_results(results)
+    return results, success


 # dbt docs serve
@@ -152,10 +219,22 @@ def docs_generate(ctx, **kwargs):
 @p.project_dir
 @p.target
 @p.vars
+@requires.preflight
+@requires.profile
+@requires.project
+@requires.runtime_config
+@requires.manifest
 def docs_serve(ctx, **kwargs):
     """Serve the documentation website for your project"""
-    flags = Flags()
-    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
+    task = ServeTask(
+        ctx.obj["flags"],
+        ctx.obj["runtime_config"],
+        ctx.obj["manifest"],
+    )
+
+    results = task.run()
+    success = task.interpret_results(results)
+    return results, success


 # dbt compile
@@ -163,13 +242,13 @@ def docs_serve(ctx, **kwargs):
 @click.pass_context
 @p.defer
 @p.exclude
+@p.favor_state
 @p.full_refresh
-@p.log_path
-@p.models
 @p.parse_only
 @p.profile
 @p.profiles_dir
 @p.project_dir
+@p.select
 @p.selector
 @p.state
 @p.target
@@ -177,10 +256,23 @@ def docs_serve(ctx, **kwargs):
 @p.threads
 @p.vars
 @p.version_check
+@requires.preflight
+@requires.profile
+@requires.project
+@requires.runtime_config
+@requires.manifest
 def compile(ctx, **kwargs):
-    """Generates executable SQL from source, model, test, and analysis files. Compiled SQL files are written to the target/ directory."""
-    flags = Flags()
-    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
+    """Generates executable SQL from source, model, test, and analysis files. Compiled SQL files are written to the
+    target/ directory."""
+    task = CompileTask(
+        ctx.obj["flags"],
+        ctx.obj["runtime_config"],
+        ctx.obj["manifest"],
+    )
+
+    results = task.run()
+    success = task.interpret_results(results)
+    return results, success


 # dbt debug
@@ -188,15 +280,22 @@ def compile(ctx, **kwargs):
 @click.pass_context
 @p.config_dir
 @p.profile
-@p.profiles_dir
+@p.profiles_dir_exists_false
 @p.project_dir
 @p.target
 @p.vars
 @p.version_check
+@requires.preflight
 def debug(ctx, **kwargs):
     """Show some helpful information about dbt for debugging. Not to be confused with the --debug option which increases verbosity."""
-    flags = Flags()
-    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
+    task = DebugTask(
+        ctx.obj["flags"],
+        None,
+    )
+
+    results = task.run()
+    success = task.interpret_results(results)
+    return results, success


 # dbt deps
@@ -207,25 +306,36 @@ def debug(ctx, **kwargs):
 @p.project_dir
 @p.target
 @p.vars
+@requires.preflight
+@requires.unset_profile
+@requires.project
 def deps(ctx, **kwargs):
     """Pull the most recent version of the dependencies listed in packages.yml"""
-    flags = Flags()
-    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
+    task = DepsTask(ctx.obj["flags"], ctx.obj["project"])
+    results = task.run()
+    success = task.interpret_results(results)
+    return results, success


 # dbt init
 @cli.command("init")
 @click.pass_context
+# for backwards compatibility, accept 'project_name' as an optional positional argument
+@click.argument("project_name", required=False)
 @p.profile
 @p.profiles_dir
 @p.project_dir
 @p.skip_profile_setup
 @p.target
 @p.vars
+@requires.preflight
 def init(ctx, **kwargs):
-    """Initialize a new DBT project."""
-    flags = Flags()
-    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
+    """Initialize a new dbt project."""
+    task = InitTask(ctx.obj["flags"], None)
+
+    results = task.run()
+    success = task.interpret_results(results)
+    return results, success


 # dbt list
@@ -240,21 +350,39 @@ def init(ctx, **kwargs):
 @p.profiles_dir
 @p.project_dir
 @p.resource_type
+@p.raw_select
 @p.selector
 @p.state
 @p.target
 @p.vars
+@requires.preflight
+@requires.profile
+@requires.project
+@requires.runtime_config
+@requires.manifest
 def list(ctx, **kwargs):
     """List the resources in your project"""
-    flags = Flags()
-    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
+    task = ListTask(
+        ctx.obj["flags"],
+        ctx.obj["runtime_config"],
+        ctx.obj["manifest"],
+    )
+
+    results = task.run()
+    success = task.interpret_results(results)
+    return results, success
+
+
+# Alias "list" to "ls"
+ls = copy(cli.commands["list"])
+ls.hidden = True
+cli.add_command(ls, "ls")


 # dbt parse
 @cli.command("parse")
 @click.pass_context
 @p.compile_parse
-@p.log_path
 @p.profile
 @p.profiles_dir
 @p.project_dir
@@ -264,24 +392,29 @@ def list(ctx, **kwargs):
 @p.vars
 @p.version_check
 @p.write_manifest
+@requires.preflight
+@requires.profile
+@requires.project
+@requires.runtime_config
+@requires.manifest(write_perf_info=True)
 def parse(ctx, **kwargs):
     """Parses the project and provides information on performance"""
-    flags = Flags()
-    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
+    # manifest generation and writing happens in @requires.manifest
+    return None, True


 # dbt run
 @cli.command("run")
 @click.pass_context
 @p.defer
+@p.favor_state
 @p.exclude
 @p.fail_fast
 @p.full_refresh
-@p.log_path
-@p.models
 @p.profile
 @p.profiles_dir
 @p.project_dir
+@p.select
 @p.selector
 @p.state
 @p.target
@@ -289,25 +422,50 @@ def parse(ctx, **kwargs):
 @p.threads
 @p.vars
 @p.version_check
+@requires.preflight
+@requires.profile
+@requires.project
+@requires.runtime_config
+@requires.manifest
 def run(ctx, **kwargs):
     """Compile SQL and execute against the current target database."""
-    flags = Flags()
-    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
+    task = RunTask(
+        ctx.obj["flags"],
+        ctx.obj["runtime_config"],
+        ctx.obj["manifest"],
+    )
+
+    results = task.run()
+    success = task.interpret_results(results)
+    return results, success


 # dbt run operation
 @cli.command("run-operation")
 @click.pass_context
+@click.argument("macro")
 @p.args
 @p.profile
 @p.profiles_dir
 @p.project_dir
 @p.target
 @p.vars
+@requires.preflight
+@requires.profile
+@requires.project
+@requires.runtime_config
+@requires.manifest
 def run_operation(ctx, **kwargs):
     """Run the named macro with any supplied arguments."""
-    flags = Flags()
-    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
+    task = RunOperationTask(
+        ctx.obj["flags"],
+        ctx.obj["runtime_config"],
+        ctx.obj["manifest"],
+    )
+
+    results = task.run()
+    success = task.interpret_results(results)
+    return results, success


 # dbt seed
@@ -315,11 +473,10 @@ def run_operation(ctx, **kwargs):
 @click.pass_context
 @p.exclude
 @p.full_refresh
-@p.log_path
-@p.models
 @p.profile
 @p.profiles_dir
 @p.project_dir
+@p.select
 @p.selector
 @p.show
 @p.state
@@ -328,10 +485,21 @@ def run_operation(ctx, **kwargs):
 @p.threads
 @p.vars
 @p.version_check
+@requires.preflight
+@requires.profile
+@requires.project
+@requires.runtime_config
+@requires.manifest
 def seed(ctx, **kwargs):
     """Load data from csv files into your data warehouse."""
-    flags = Flags()
-    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
+    task = SeedTask(
+        ctx.obj["flags"],
+        ctx.obj["runtime_config"],
+        ctx.obj["manifest"],
|
||||||
|
)
|
||||||
|
results = task.run()
|
||||||
|
success = task.interpret_results(results)
|
||||||
|
return results, success
|
||||||
|
|
||||||
|
|
||||||
# dbt snapshot
|
# dbt snapshot
|
||||||
@@ -339,19 +507,32 @@ def seed(ctx, **kwargs):
|
|||||||
@click.pass_context
|
@click.pass_context
|
||||||
@p.defer
|
@p.defer
|
||||||
@p.exclude
|
@p.exclude
|
||||||
@p.models
|
@p.favor_state
|
||||||
@p.profile
|
@p.profile
|
||||||
@p.profiles_dir
|
@p.profiles_dir
|
||||||
@p.project_dir
|
@p.project_dir
|
||||||
|
@p.select
|
||||||
@p.selector
|
@p.selector
|
||||||
@p.state
|
@p.state
|
||||||
@p.target
|
@p.target
|
||||||
@p.threads
|
@p.threads
|
||||||
@p.vars
|
@p.vars
|
||||||
|
@requires.preflight
|
||||||
|
@requires.profile
|
||||||
|
@requires.project
|
||||||
|
@requires.runtime_config
|
||||||
|
@requires.manifest
|
||||||
def snapshot(ctx, **kwargs):
|
def snapshot(ctx, **kwargs):
|
||||||
"""Execute snapshots defined in your project"""
|
"""Execute snapshots defined in your project"""
|
||||||
flags = Flags()
|
task = SnapshotTask(
|
||||||
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
|
ctx.obj["flags"],
|
||||||
|
ctx.obj["runtime_config"],
|
||||||
|
ctx.obj["manifest"],
|
||||||
|
)
|
||||||
|
|
||||||
|
results = task.run()
|
||||||
|
success = task.interpret_results(results)
|
||||||
|
return results, success
|
||||||
|
|
||||||
|
|
||||||
# dbt source
|
# dbt source
|
||||||
@@ -365,20 +546,38 @@ def source(ctx, **kwargs):
|
|||||||
@source.command("freshness")
|
@source.command("freshness")
|
||||||
@click.pass_context
|
@click.pass_context
|
||||||
@p.exclude
|
@p.exclude
|
||||||
@p.models
|
|
||||||
@p.output_path # TODO: Is this ok to re-use? We have three different output params, how much can we consolidate?
|
@p.output_path # TODO: Is this ok to re-use? We have three different output params, how much can we consolidate?
|
||||||
@p.profile
|
@p.profile
|
||||||
@p.profiles_dir
|
@p.profiles_dir
|
||||||
@p.project_dir
|
@p.project_dir
|
||||||
|
@p.select
|
||||||
@p.selector
|
@p.selector
|
||||||
@p.state
|
@p.state
|
||||||
@p.target
|
@p.target
|
||||||
@p.threads
|
@p.threads
|
||||||
@p.vars
|
@p.vars
|
||||||
|
@requires.preflight
|
||||||
|
@requires.profile
|
||||||
|
@requires.project
|
||||||
|
@requires.runtime_config
|
||||||
|
@requires.manifest
|
||||||
def freshness(ctx, **kwargs):
|
def freshness(ctx, **kwargs):
|
||||||
"""Snapshots the current freshness of the project's sources"""
|
"""check the current freshness of the project's sources"""
|
||||||
flags = Flags()
|
task = FreshnessTask(
|
||||||
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
|
ctx.obj["flags"],
|
||||||
|
ctx.obj["runtime_config"],
|
||||||
|
ctx.obj["manifest"],
|
||||||
|
)
|
||||||
|
|
||||||
|
results = task.run()
|
||||||
|
success = task.interpret_results(results)
|
||||||
|
return results, success
|
||||||
|
|
||||||
|
|
||||||
|
# Alias "source freshness" to "snapshot-freshness"
|
||||||
|
snapshot_freshness = copy(cli.commands["source"].commands["freshness"]) # type: ignore
|
||||||
|
snapshot_freshness.hidden = True
|
||||||
|
cli.commands["source"].add_command(snapshot_freshness, "snapshot-freshness") # type: ignore
|
||||||
|
|
||||||
|
|
||||||
# dbt test
|
# dbt test
|
||||||
@@ -387,12 +586,12 @@ def freshness(ctx, **kwargs):
|
|||||||
@p.defer
|
@p.defer
|
||||||
@p.exclude
|
@p.exclude
|
||||||
@p.fail_fast
|
@p.fail_fast
|
||||||
|
@p.favor_state
|
||||||
@p.indirect_selection
|
@p.indirect_selection
|
||||||
@p.log_path
|
|
||||||
@p.models
|
|
||||||
@p.profile
|
@p.profile
|
||||||
@p.profiles_dir
|
@p.profiles_dir
|
||||||
@p.project_dir
|
@p.project_dir
|
||||||
|
@p.select
|
||||||
@p.selector
|
@p.selector
|
||||||
@p.state
|
@p.state
|
||||||
@p.store_failures
|
@p.store_failures
|
||||||
@@ -401,12 +600,24 @@ def freshness(ctx, **kwargs):
|
|||||||
@p.threads
|
@p.threads
|
||||||
@p.vars
|
@p.vars
|
||||||
@p.version_check
|
@p.version_check
|
||||||
|
@requires.preflight
|
||||||
|
@requires.profile
|
||||||
|
@requires.project
|
||||||
|
@requires.runtime_config
|
||||||
|
@requires.manifest
|
||||||
def test(ctx, **kwargs):
|
def test(ctx, **kwargs):
|
||||||
"""Runs tests on data in deployed models. Run this after `dbt run`"""
|
"""Runs tests on data in deployed models. Run this after `dbt run`"""
|
||||||
flags = Flags()
|
task = TestTask(
|
||||||
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
|
ctx.obj["flags"],
|
||||||
|
ctx.obj["runtime_config"],
|
||||||
|
ctx.obj["manifest"],
|
||||||
|
)
|
||||||
|
|
||||||
|
results = task.run()
|
||||||
|
success = task.interpret_results(results)
|
||||||
|
return results, success
|
||||||
|
|
||||||
|
|
||||||
# Support running as a module
|
# Support running as a module
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
cli_runner()
|
cli()
|
||||||
|
|||||||
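
Every command body above now has the same shape: the @requires decorators hydrate ctx.obj, and the function only builds a task and interprets its results. A minimal, self-contained sketch of that shape (SomeTask and the wiring are invented stand-ins, not dbt APIs):

import click


class SomeTask:
    # hypothetical stand-in for RunTask, SeedTask, TestTask, etc.
    def __init__(self, flags, runtime_config, manifest):
        self.flags = flags
        self.runtime_config = runtime_config
        self.manifest = manifest

    def run(self):
        return ["one result"]

    def interpret_results(self, results):
        return bool(results)


@click.command("example")
@click.pass_context
def example(ctx, **kwargs):
    ctx.obj = ctx.obj or {}  # normally populated by the @requires decorators
    task = SomeTask(ctx.obj.get("flags"), ctx.obj.get("runtime_config"), ctx.obj.get("manifest"))
    results = task.run()
    success = task.interpret_results(results)
    return results, success

Returning (results, success) instead of echoing means a programmatic caller can inspect outcomes directly, which the removed click.echo placeholders never allowed.
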
@@ -1,5 +1,7 @@
-from click import ParamType
-import yaml
+from click import ParamType, Choice
+
+from dbt.config.utils import parse_cli_vars
+from dbt.exceptions import ValidationError

 from dbt.helper_types import WarnErrorOptions

@@ -14,8 +16,8 @@ class YAML(ParamType):
         if not isinstance(value, str):
             self.fail(f"Cannot load YAML from type {type(value)}", param, ctx)
         try:
-            return yaml.load(value, Loader=yaml.Loader)
-        except yaml.parser.ParserError:
+            return parse_cli_vars(value)
+        except ValidationError:
             self.fail(f"String '{value}' is not valid YAML", param, ctx)


@@ -25,6 +27,7 @@ class WarnErrorOptionsType(YAML):
     name = "WarnErrorOptionsType"

     def convert(self, value, param, ctx):
+        # this function is being used by param in click
         include_exclude = super().convert(value, param, ctx)

         return WarnErrorOptions(
@@ -46,3 +49,13 @@ class Truthy(ParamType):
             return None
         else:
             return value
+
+
+class ChoiceTuple(Choice):
+    name = "CHOICE_TUPLE"
+
+    def convert(self, value, param, ctx):
+        for value_item in value:
+            super().convert(value_item, param, ctx)
+
+        return value
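
A sketch of how ChoiceTuple is meant to be wired up, matching its use in the params module later in this diff (the choices list here is illustrative):

import click

from dbt.cli.option_types import ChoiceTuple
from dbt.cli.options import MultiOption

resource_type = click.option(
    "--resource-type",
    cls=MultiOption,  # collects several bare values into one tuple
    type=ChoiceTuple(["model", "seed", "snapshot"], case_sensitive=False),
    default=(),
)

Because MultiOption hands the converter a tuple, plain click.Choice would fail on the tuple itself; ChoiceTuple validates each element and returns the tuple unchanged.
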
core/dbt/cli/options.py (new file, 44 lines)
@@ -0,0 +1,44 @@
+import click
+
+
+# Implementation from: https://stackoverflow.com/a/48394004
+# Note MultiOption options must be specified with type=tuple or type=ChoiceTuple (https://github.com/pallets/click/issues/2012)
+class MultiOption(click.Option):
+    def __init__(self, *args, **kwargs):
+        self.save_other_options = kwargs.pop("save_other_options", True)
+        nargs = kwargs.pop("nargs", -1)
+        assert nargs == -1, "nargs, if set, must be -1 not {}".format(nargs)
+        super(MultiOption, self).__init__(*args, **kwargs)
+        self._previous_parser_process = None
+        self._eat_all_parser = None
+
+    def add_to_parser(self, parser, ctx):
+        def parser_process(value, state):
+            # method to hook to the parser.process
+            done = False
+            value = [value]
+            if self.save_other_options:
+                # grab everything up to the next option
+                while state.rargs and not done:
+                    for prefix in self._eat_all_parser.prefixes:
+                        if state.rargs[0].startswith(prefix):
+                            done = True
+                    if not done:
+                        value.append(state.rargs.pop(0))
+            else:
+                # grab everything remaining
+                value += state.rargs
+                state.rargs[:] = []
+            value = tuple(value)
+            # call the actual process
+            self._previous_parser_process(value, state)
+
+        retval = super(MultiOption, self).add_to_parser(parser, ctx)
+        for name in self.opts:
+            our_parser = parser._long_opt.get(name) or parser._short_opt.get(name)
+            if our_parser:
+                self._eat_all_parser = our_parser
+                self._previous_parser_process = our_parser.process
+                our_parser.process = parser_process
+                break
+        return retval
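
A usage sketch of what the parser hook buys (the command and values are illustrative):

import click

from dbt.cli.options import MultiOption  # the class defined above


@click.command()
@click.option("--select", cls=MultiOption, type=tuple, default=())
@click.option("--target", default=None)
def demo(select, target):
    # invoked as `demo --select a b c --target dev`, the hooked process()
    # keeps consuming bare arguments into --select until it sees the next
    # `-`-prefixed token, so select == ("a", "b", "c") and target == "dev"
    click.echo(f"select={select} target={target}")

Click's own nargs must be a fixed count on options, which is why the linked Stack Overflow workaround is needed for "eat all remaining values" behavior.
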
@@ -1,17 +1,15 @@
 from pathlib import Path, PurePath

 import click
-from dbt.cli.option_types import YAML, WarnErrorOptionsType
+from dbt.cli.options import MultiOption
+from dbt.cli.option_types import YAML, ChoiceTuple, WarnErrorOptionsType
 from dbt.cli.resolvers import default_project_dir, default_profiles_dir
+from dbt.version import get_version_information

-# TODO: The name (reflected in flags) is a correction!
-# The original name was `SEND_ANONYMOUS_USAGE_STATS` and used an env var called "DBT_SEND_ANONYMOUS_USAGE_STATS"
-# Both of which break existing naming conventions (doesn't match param flag).
-# This will need to be fixed before use in the main codebase and communicated as a change to the community!
-anonymous_usage_stats = click.option(
-    "--anonymous-usage-stats/--no-anonymous-usage-stats",
-    envvar="DBT_ANONYMOUS_USAGE_STATS",
+# TODO: Rename this to meet naming conventions (the word "send" is redundant)
+send_anonymous_usage_stats = click.option(
+    "--send-anonymous-usage-stats/--no-send-anonymous-usage-stats",
+    envvar="DBT_SEND_ANONYMOUS_USAGE_STATS",
     help="Send anonymous usage stats to dbt Labs.",
     default=True,
 )
@@ -80,7 +78,9 @@ enable_legacy_logger = click.option(
     hidden=True,
 )

-exclude = click.option("--exclude", envvar=None, help="Specify the nodes to exclude.")
+exclude = click.option(
+    "--exclude", envvar=None, type=tuple, cls=MultiOption, help="Specify the nodes to exclude."
+)

 fail_fast = click.option(
     "--fail-fast/--no-fail-fast",
@@ -89,6 +89,12 @@ fail_fast = click.option(
     help="Stop execution on first failure.",
 )

+favor_state = click.option(
+    "--favor-state/--no-favor-state",
+    envvar="DBT_FAVOR_STATE",
+    help="If set, defer to the argument provided to the state flag for resolving unselected nodes, even if the node(s) exist as a database object in the current environment.",
+)
+
 full_refresh = click.option(
     "--full-refresh",
     "-f",
@@ -101,7 +107,7 @@ indirect_selection = click.option(
     "--indirect-selection",
     envvar="DBT_INDIRECT_SELECTION",
     help="Select all tests that are adjacent to selected resources, even if they those resources have been explicitly selected.",
-    type=click.Choice(["eager", "cautious"], case_sensitive=False),
+    type=click.Choice(["eager", "cautious", "buildable"], case_sensitive=False),
     default="eager",
 )

@@ -123,7 +129,8 @@ log_path = click.option(
     "--log-path",
     envvar="DBT_LOG_PATH",
     help="Configure the 'log-path'. Only applies this setting for the current run. Overrides the 'DBT_LOG_PATH' if it is set.",
-    type=click.Path(),
+    default=None,
+    type=click.Path(resolve_path=True, path_type=Path),
 )

 macro_debugging = click.option(
@@ -132,21 +139,12 @@ macro_debugging = click.option(
     hidden=True,
 )

-models = click.option(
-    "-m",
-    "-s",
-    "models",
-    envvar=None,
-    help="Specify the nodes to include.",
-    multiple=True,
-)
-
 output = click.option(
     "--output",
     envvar=None,
     help="TODO: No current help text",
     type=click.Choice(["json", "name", "path", "selector"], case_sensitive=False),
-    default="name",
+    default="selector",
 )

 output_keys = click.option(
@@ -213,15 +211,24 @@ profiles_dir = click.option(
     "--profiles-dir",
     envvar="DBT_PROFILES_DIR",
     help="Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/",
-    default=default_profiles_dir(),
+    default=default_profiles_dir,
     type=click.Path(exists=True),
 )

+# `dbt debug` uses this because it implements custom behaviour for non-existent profiles.yml directories
+profiles_dir_exists_false = click.option(
+    "--profiles-dir",
+    envvar="DBT_PROFILES_DIR",
+    help="Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/",
+    default=default_profiles_dir,
+    type=click.Path(exists=False),
+)
+
 project_dir = click.option(
     "--project-dir",
     envvar=None,
     help="Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.",
-    default=default_project_dir(),
+    default=default_project_dir,
     type=click.Path(exists=True),
 )

@@ -240,10 +247,11 @@ record_timing_info = click.option(
 )

 resource_type = click.option(
+    "--resource-types",
     "--resource-type",
     envvar=None,
     help="TODO: No current help text",
-    type=click.Choice(
+    type=ChoiceTuple(
         [
             "metric",
             "source",
@@ -258,9 +266,27 @@ resource_type = click.option(
         ],
         case_sensitive=False,
     ),
-    default="default",
+    cls=MultiOption,
+    default=(),
 )

+model_decls = ("-m", "--models", "--model")
+select_decls = ("-s", "--select")
+select_attrs = {
+    "envvar": None,
+    "help": "Specify the nodes to include.",
+    "cls": MultiOption,
+    "type": tuple,
+}
+
+# `--select` and `--models` are analogous for most commands except `dbt list` for legacy reasons.
+# Most CLI arguments should use the combined `select` option that aliases `--models` to `--select`.
+# However, if you need to split out these separators (like `dbt ls`), use the `models` and `raw_select` options instead.
+# See https://github.com/dbt-labs/dbt-core/pull/6774#issuecomment-1408476095 for more info.
+models = click.option(*model_decls, **select_attrs)
+raw_select = click.option(*select_decls, **select_attrs)
+select = click.option(*select_decls, *model_decls, **select_attrs)
+
 selector = click.option(
     "--selector", envvar=None, help="The selector name to use, as defined in selectors.yml"
 )
@@ -269,6 +295,19 @@ show = click.option(
     "--show", envvar=None, help="Show a sample of the loaded data in the terminal", is_flag=True
 )

+# TODO: The env var is a correction!
+# The original env var was `DBT_TEST_SINGLE_THREADED`.
+# This broke the existing naming convention.
+# This will need to be communicated as a change to the community!
+#
+# N.B. This flag is only used for testing, hence it's hidden from help text.
+single_threaded = click.option(
+    "--single-threaded/--no-single-threaded",
+    envvar="DBT_SINGLE_THREADED",
+    default=False,
+    hidden=True,
+)
+
 skip_profile_setup = click.option(
     "--skip-profile-setup", "-s", envvar=None, help="Skip interactive profile setup.", is_flag=True
 )
@@ -283,10 +322,10 @@ state = click.option(
     help="If set, use the given directory as the source for json files to compare with this project.",
     type=click.Path(
         dir_okay=True,
-        exists=True,
         file_okay=False,
         readable=True,
         resolve_path=True,
+        path_type=Path,
     ),
 )

@@ -319,7 +358,7 @@ threads = click.option(
     "--threads",
     envvar=None,
     help="Specify number of threads to use while executing models. Overrides settings in profiles.yml.",
-    default=1,
+    default=None,
     type=click.INT,
 )

@@ -341,12 +380,26 @@ vars = click.option(
     envvar=None,
     help="Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. '{my_variable: my_value}'",
     type=YAML(),
+    default="{}",
 )


+# TODO: when legacy flags are deprecated use
+# click.version_option instead of a callback
+def _version_callback(ctx, _param, value):
+    if not value or ctx.resilient_parsing:
+        return
+    click.echo(get_version_information())
+    ctx.exit()
+
+
 version = click.option(
     "--version",
+    callback=_version_callback,
     envvar=None,
+    expose_value=False,
     help="Show version information",
+    is_eager=True,
     is_flag=True,
 )

@@ -362,13 +415,13 @@ warn_error = click.option(
     envvar="DBT_WARN_ERROR",
     help="If dbt would normally warn, instead raise an exception. Examples include --select that selects nothing, deprecations, configurations with no associated models, invalid test configurations, and missing sources/refs in tests.",
     default=None,
-    flag_value=True,
+    is_flag=True,
 )

 warn_error_options = click.option(
     "--warn-error-options",
     envvar="DBT_WARN_ERROR_OPTIONS",
-    default=None,
+    default="{}",
     help="""If dbt would normally warn, instead raise an exception based on include/exclude configuration. Examples include --select that selects nothing, deprecations, configurations with no associated models, invalid test configurations,
     and missing sources/refs in tests. This argument should be a YAML string, with keys 'include' or 'exclude'. eg. '{"include": "all", "exclude": ["NoNodesForSelectionCriteria"]}'""",
     type=WarnErrorOptionsType(),
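
One easy-to-miss change above: --profiles-dir and --project-dir now pass the resolver function itself (default=default_profiles_dir) rather than its result. Click accepts a zero-argument callable as a default and calls it lazily when the command runs, so the default tracks the invocation's working directory instead of being frozen at import time. A minimal illustration (the option itself is made up):

import click
from pathlib import Path


def lazy_default() -> Path:
    # evaluated at parse time, not at module import time
    return Path.cwd()


@click.command()
@click.option("--work-dir", default=lazy_default, type=click.Path(path_type=Path))
def show(work_dir: Path):
    click.echo(str(work_dir))
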
core/dbt/cli/requires.py (new file, 187 lines)
@@ -0,0 +1,187 @@
+from dbt.version import installed as installed_version
+from dbt.adapters.factory import adapter_management, register_adapter
+from dbt.flags import set_flags, get_flag_dict
+from dbt.cli.flags import Flags
+from dbt.config import RuntimeConfig
+from dbt.config.runtime import load_project, load_profile, UnsetProfile
+from dbt.events.functions import setup_event_logger, fire_event, LOG_VERSION
+from dbt.events.types import MainReportVersion, MainReportArgs, MainTrackingUserState
+from dbt.exceptions import DbtProjectError
+from dbt.parser.manifest import ManifestLoader, write_manifest
+from dbt.profiler import profiler
+from dbt.tracking import active_user, initialize_from_flags, track_run
+from dbt.utils import cast_dict_to_dict_of_strings
+
+from click import Context
+from functools import update_wrapper
+
+
+def preflight(func):
+    def wrapper(*args, **kwargs):
+        ctx = args[0]
+        assert isinstance(ctx, Context)
+        ctx.obj = ctx.obj or {}
+
+        # Flags
+        flags = Flags(ctx)
+        ctx.obj["flags"] = flags
+        set_flags(flags)
+
+        # Tracking
+        initialize_from_flags(flags.SEND_ANONYMOUS_USAGE_STATS, flags.PROFILES_DIR)
+        ctx.with_resource(track_run(run_command=flags.WHICH))
+
+        # Logging
+        # N.B. Legacy logger is not supported
+        setup_event_logger(
+            flags.LOG_PATH,
+            flags.LOG_FORMAT,
+            flags.USE_COLORS,
+            flags.DEBUG,
+            flags.LOG_CACHE_EVENTS,
+            flags.QUIET,
+        )
+
+        # Now that we have our logger, fire away!
+        fire_event(MainReportVersion(version=str(installed_version), log_version=LOG_VERSION))
+        flags_dict_str = cast_dict_to_dict_of_strings(get_flag_dict())
+        fire_event(MainReportArgs(args=flags_dict_str))
+
+        if active_user is not None:  # mypy appeasement, always true
+            fire_event(MainTrackingUserState(user_state=active_user.state()))
+
+        # Profiling
+        if flags.RECORD_TIMING_INFO:
+            ctx.with_resource(profiler(enable=True, outfile=flags.RECORD_TIMING_INFO))
+
+        # Adapter management
+        ctx.with_resource(adapter_management())
+
+        return func(*args, **kwargs)
+
+    return update_wrapper(wrapper, func)
+
+
+# TODO: UnsetProfile is necessary for deps and clean to load a project.
+# This decorator and its usage can be removed once https://github.com/dbt-labs/dbt-core/issues/6257 is closed.
+def unset_profile(func):
+    def wrapper(*args, **kwargs):
+        ctx = args[0]
+        assert isinstance(ctx, Context)
+
+        if ctx.obj.get("profile") is None:
+            profile = UnsetProfile()
+            ctx.obj["profile"] = profile
+
+        return func(*args, **kwargs)
+
+    return update_wrapper(wrapper, func)
+
+
+def profile(func):
+    def wrapper(*args, **kwargs):
+        ctx = args[0]
+        assert isinstance(ctx, Context)
+
+        if ctx.obj.get("profile") is None:
+            flags = ctx.obj["flags"]
+            # TODO: Generalize safe access to flags.THREADS:
+            # https://github.com/dbt-labs/dbt-core/issues/6259
+            threads = getattr(flags, "THREADS", None)
+            profile = load_profile(
+                flags.PROJECT_DIR, flags.VARS, flags.PROFILE, flags.TARGET, threads
+            )
+            ctx.obj["profile"] = profile
+
+        return func(*args, **kwargs)
+
+    return update_wrapper(wrapper, func)
+
+
+def project(func):
+    def wrapper(*args, **kwargs):
+        ctx = args[0]
+        assert isinstance(ctx, Context)
+
+        if ctx.obj.get("project") is None:
+            # TODO: Decouple target from profile, and remove the need for profile here:
+            # https://github.com/dbt-labs/dbt-core/issues/6257
+            if not ctx.obj.get("profile"):
+                raise DbtProjectError("profile required for project")
+
+            flags = ctx.obj["flags"]
+            project = load_project(
+                flags.PROJECT_DIR, flags.VERSION_CHECK, ctx.obj["profile"], flags.VARS
+            )
+            ctx.obj["project"] = project
+
+        return func(*args, **kwargs)
+
+    return update_wrapper(wrapper, func)
+
+
+def runtime_config(func):
+    """A decorator used by click command functions for generating a runtime
+    config given a profile and project.
+    """
+
+    def wrapper(*args, **kwargs):
+        ctx = args[0]
+        assert isinstance(ctx, Context)
+
+        req_strs = ["profile", "project"]
+        reqs = [ctx.obj.get(req_str) for req_str in req_strs]
+
+        if None in reqs:
+            raise DbtProjectError("profile and project required for runtime_config")
+
+        ctx.obj["runtime_config"] = RuntimeConfig.from_parts(
+            ctx.obj["project"],
+            ctx.obj["profile"],
+            ctx.obj["flags"],
+        )
+
+        return func(*args, **kwargs)
+
+    return update_wrapper(wrapper, func)
+
+
+def manifest(*args0, write=True, write_perf_info=False):
+    """A decorator used by click command functions for generating a manifest
+    given a profile, project, and runtime config. This also registers the adaper
+    from the runtime config and conditionally writes the manifest to disc.
+    """
+
+    def outer_wrapper(func):
+        def wrapper(*args, **kwargs):
+            ctx = args[0]
+            assert isinstance(ctx, Context)
+
+            req_strs = ["profile", "project", "runtime_config"]
+            reqs = [ctx.obj.get(dep) for dep in req_strs]
+
+            if None in reqs:
+                raise DbtProjectError("profile, project, and runtime_config required for manifest")
+
+            runtime_config = ctx.obj["runtime_config"]
+            register_adapter(runtime_config)
+
+            # a manifest has already been set on the context, so don't overwrite it
+            if ctx.obj.get("manifest") is None:
+                manifest = ManifestLoader.get_full_manifest(
+                    runtime_config, write_perf_info=write_perf_info
+                )
+
+                ctx.obj["manifest"] = manifest
+                if write and ctx.obj["flags"].write_json:
+                    write_manifest(manifest, ctx.obj["runtime_config"].target_path)
+
+            return func(*args, **kwargs)
+
+        return update_wrapper(wrapper, func)
+
+    # if there are no args, the decorator was used without params @decorator
+    # otherwise, the decorator was called with params @decorator(arg)
+    if len(args0) == 0:
+        return outer_wrapper
+    return outer_wrapper(args0[0])
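
The five decorators above share one skeleton: pull the click Context out of args[0], compute a requirement once, cache it on ctx.obj, and preserve the wrapped command's metadata with update_wrapper. Reduced to a self-contained sketch (the "greeting" requirement is invented):

from functools import update_wrapper

from click import Context


def requires_greeting(func):
    def wrapper(*args, **kwargs):
        ctx = args[0]
        assert isinstance(ctx, Context)
        ctx.obj = ctx.obj or {}
        # idempotent: respect anything a caller or test pre-seeded on ctx.obj
        if ctx.obj.get("greeting") is None:
            ctx.obj["greeting"] = "hello"
        return func(*args, **kwargs)

    return update_wrapper(wrapper, func)

The check-before-write step is what lets manifest() skip regeneration when a manifest has already been set on the context.
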
@@ -1,11 +1,31 @@
 from pathlib import Path
+from dbt.config.project import PartialProject
+from dbt.exceptions import DbtProjectError


-def default_project_dir():
+def default_project_dir() -> Path:
     paths = list(Path.cwd().parents)
     paths.insert(0, Path.cwd())
     return next((x for x in paths if (x / "dbt_project.yml").exists()), Path.cwd())


-def default_profiles_dir():
+def default_profiles_dir() -> Path:
     return Path.cwd() if (Path.cwd() / "profiles.yml").exists() else Path.home() / ".dbt"
+
+
+def default_log_path(project_dir: Path, verify_version: bool = False) -> Path:
+    """If available, derive a default log path from dbt_project.yml. Otherwise, default to "logs".
+    Known limitations:
+    1. Using PartialProject here, so no jinja rendering of log-path.
+    2. Programmatic invocations of the cli via dbtRunner may pass a Project object directly,
+       which is not being taken into consideration here to extract a log-path.
+    """
+    default_log_path = Path("logs")
+    try:
+        partial = PartialProject.from_project_root(str(project_dir), verify_version=verify_version)
+        partial_log_path = partial.project_dict.get("log-path") or default_log_path
+        default_log_path = Path(project_dir) / partial_log_path
+    except DbtProjectError:
+        pass
+
+    return default_log_path
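
default_project_dir is a standard find-up search. The same idea as a standalone helper (the name find_up is invented):

from pathlib import Path
from typing import Optional


def find_up(marker: str, start: Optional[Path] = None) -> Path:
    start = start or Path.cwd()
    # walking upward, the first directory containing the marker wins;
    # otherwise fall back to the starting directory
    return next((p for p in [start, *start.parents] if (p / marker).exists()), start)
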
@@ -40,7 +40,7 @@ from dbt.exceptions import (
     UndefinedCompilationError,
     UndefinedMacroError,
 )
-from dbt import flags
+from dbt.flags import get_flags
 from dbt.node_types import ModelLanguage


@@ -99,8 +99,9 @@ class MacroFuzzEnvironment(jinja2.sandbox.SandboxedEnvironment):
         If the value is 'write', also write the files to disk.
         WARNING: This can write a ton of data if you aren't careful.
         """
-        if filename == "<template>" and flags.MACRO_DEBUGGING:
-            write = flags.MACRO_DEBUGGING == "write"
+        macro_debugging = get_flags().MACRO_DEBUGGING
+        if filename == "<template>" and macro_debugging:
+            write = macro_debugging == "write"
             filename = _linecache_inject(source, write)

         return super()._compile(source, filename)  # type: ignore
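
This hunk is the first of several below that migrate from `from dbt import flags` to the get_flags() accessor. The core of that pattern, as a sketch (dbt's real dbt.flags module holds more state than this):

# module-level singleton set once per invocation, read through an accessor
_FLAGS = None


def set_flags(flags) -> None:
    global _FLAGS
    _FLAGS = flags


def get_flags():
    return _FLAGS

Reading flags at call time rather than binding a value at import time lets each CLI invocation carry its own settings; requires.preflight above calls set_flags once the Flags object exists.
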
@@ -1,30 +1,32 @@
 import errno
-import functools
 import fnmatch
+import functools
 import json
 import os
 import os.path
 import re
 import shutil
+import stat
 import subprocess
 import sys
 import tarfile
-import requests
-import stat
-from typing import Type, NoReturn, List, Optional, Dict, Any, Tuple, Callable, Union
-from pathspec import PathSpec  # type: ignore
+from pathlib import Path
+from typing import Any, Callable, Dict, List, NoReturn, Optional, Tuple, Type, Union

+import dbt.exceptions
+import requests
 from dbt.events.functions import fire_event
 from dbt.events.types import (
-    SystemErrorRetrievingModTime,
     SystemCouldNotWrite,
+    SystemErrorRetrievingModTime,
     SystemExecutingCmd,
     SystemStdOut,
     SystemStdErr,
     SystemReportReturnCode,
 )
-import dbt.exceptions
+from dbt.exceptions import DbtInternalError
 from dbt.utils import _connection_exception_retry as connection_exception_retry
+from pathspec import PathSpec  # type: ignore

 if sys.platform == "win32":
     from ctypes import WinDLL, c_bool
@@ -106,12 +108,18 @@ def load_file_contents(path: str, strip: bool = True) -> str:
     return to_return


-def make_directory(path: str) -> None:
+@functools.singledispatch
+def make_directory(path=None) -> None:
     """
     Make a directory and any intermediate directories that don't already
     exist. This function handles the case where two threads try to create
     a directory at once.
     """
+    raise DbtInternalError(f"Can not create directory from {type(path)} ")
+
+
+@make_directory.register
+def _(path: str) -> None:
     path = convert_path(path)
     if not os.path.exists(path):
         # concurrent writes that try to create the same dir can fail
@@ -125,6 +133,11 @@ def make_directory(path: str) -> None:
             raise e


+@make_directory.register
+def _(path: Path) -> None:
+    path.mkdir(parents=True, exist_ok=True)
+
+
 def make_file(path: str, contents: str = "", overwrite: bool = False) -> bool:
     """
     Make a file at `path` assuming that the directory it resides in already
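
make_directory now dispatches on the argument's runtime type via functools.singledispatch: the base function is the error case, and each registered overload claims one type. A self-contained demonstration of the same mechanism:

import functools
from pathlib import Path


@functools.singledispatch
def describe(path=None) -> str:
    # base case: any unregistered type is an error
    raise TypeError(f"cannot describe {type(path)}")


@describe.register
def _(path: str) -> str:
    return f"str path: {path}"


@describe.register
def _(path: Path) -> str:
    return f"Path object: {path}"


assert describe("a/b") == "str path: a/b"
assert describe(Path("a/b")).startswith("Path object:")

Registering by annotation (rather than describe.register(str)) requires Python 3.7+, which dbt already assumes.
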
@@ -1,12 +1,13 @@
-import os
-from collections import defaultdict
-from typing import List, Dict, Any, Tuple, Optional
+import argparse

 import networkx as nx  # type: ignore
+import os
 import pickle
 import sqlparse

-from dbt import flags
+from collections import defaultdict
+from typing import List, Dict, Any, Tuple, Optional
+
+from dbt.flags import get_flags
 from dbt.adapters.factory import get_adapter
 from dbt.clients import jinja
 from dbt.clients.system import make_directory
@@ -32,6 +33,7 @@ from dbt.events.contextvars import get_node_info
 from dbt.node_types import NodeType, ModelLanguage
 from dbt.events.format import pluralize
 import dbt.tracking
+import dbt.task.list as list_task

 graph_file_name = "graph.gpickle"

@@ -48,6 +50,7 @@ def print_compile_stats(stats):
         NodeType.Source: "source",
         NodeType.Exposure: "exposure",
         NodeType.Metric: "metric",
+        NodeType.Group: "group",
     }

     results = {k: 0 for k in names.keys()}
@@ -85,6 +88,8 @@ def _generate_stats(manifest: Manifest):
         stats[metric.resource_type] += 1
     for macro in manifest.macros.values():
         stats[macro.resource_type] += 1
+    for group in manifest.groups.values():
+        stats[group.resource_type] += 1
     return stats


@@ -351,13 +356,6 @@ class Compiler:
         )

         if node.language == ModelLanguage.python:
-            # TODO could we also 'minify' this code at all? just aesthetic, not functional
-
-            # quoating seems like something very specific to sql so far
-            # for all python implementations we are seeing there's no quating.
-            # TODO try to find better way to do this, given that
-            original_quoting = self.config.quoting
-            self.config.quoting = {key: False for key in original_quoting.keys()}
             context = self._create_node_context(node, manifest, extra_context)

             postfix = jinja.get_rendered(
@@ -367,8 +365,6 @@ class Compiler:
             )
             # we should NOT jinja render the python model's 'raw code'
             node.compiled_code = f"{node.raw_code}\n\n{postfix}"
-            # restore quoting settings in the end since context is lazy evaluated
-            self.config.quoting = original_quoting

         else:
             context = self._create_node_context(node, manifest, extra_context)
@@ -378,6 +374,18 @@ class Compiler:
             node,
         )

+        # relation_name is set at parse time, except for tests without store_failures,
+        # but cli param can turn on store_failures, so we set here.
+        if (
+            node.resource_type == NodeType.Test
+            and node.relation_name is None
+            and node.is_relational
+        ):
+            adapter = get_adapter(self.config)
+            relation_cls = adapter.Relation
+            relation_name = str(relation_cls.create_from(self.config, node))
+            node.relation_name = relation_name
+
         node.compiled = True

         return node
@@ -385,6 +393,7 @@ class Compiler:
     def write_graph_file(self, linker: Linker, manifest: Manifest):
         filename = graph_file_name
         graph_path = os.path.join(self.config.target_path, filename)
+        flags = get_flags()
         if flags.WRITE_JSON:
             linker.write_graph(graph_path, manifest)

@@ -482,7 +491,13 @@ class Compiler:

         if write:
             self.write_graph_file(linker, manifest)
-        print_compile_stats(stats)
+
+        # Do not print these for ListTask's
+        if not (
+            self.config.args.__class__ == argparse.Namespace
+            and self.config.args.cls == list_task.ListTask
+        ):
+            print_compile_stats(stats)

         return Graph(linker.graph)

@@ -1,4 +1,4 @@
 # all these are just exports, they need "noqa" so flake8 will not complain.
 from .profile import Profile, read_user_config  # noqa
-from .project import Project, IsFQNResource  # noqa
-from .runtime import RuntimeConfig, UnsetProfileConfig  # noqa
+from .project import Project, IsFQNResource, PartialProject  # noqa
+from .runtime import RuntimeConfig  # noqa

@@ -4,7 +4,7 @@ import os

 from dbt.dataclass_schema import ValidationError

-from dbt import flags
+from dbt.flags import get_flags
 from dbt.clients.system import load_file_contents
 from dbt.clients.yaml_helper import load_yaml_text
 from dbt.contracts.connection import Credentials, HasCredentials
@@ -32,22 +32,6 @@ dbt encountered an error while trying to read your profiles.yml file.
 """


-NO_SUPPLIED_PROFILE_ERROR = """\
-dbt cannot run because no profile was specified for this dbt project.
-To specify a profile for this project, add a line like the this to
-your dbt_project.yml file:
-
-profile: [profile name]
-
-Here, [profile name] should be replaced with a profile name
-defined in your profiles.yml file. You can find profiles.yml here:
-
-{profiles_file}/profiles.yml
-""".format(
-    profiles_file=flags.DEFAULT_PROFILES_DIR
-)
-
-
 def read_profile(profiles_dir: str) -> Dict[str, Any]:
     path = os.path.join(profiles_dir, "profiles.yml")

@@ -197,10 +181,33 @@ class Profile(HasCredentials):
         args_profile_name: Optional[str],
         project_profile_name: Optional[str] = None,
     ) -> str:
+        # TODO: Duplicating this method as direct copy of the implementation in dbt.cli.resolvers
+        # dbt.cli.resolvers implementation can't be used because it causes a circular dependency.
+        # This should be removed and use a safe default access on the Flags module when
+        # https://github.com/dbt-labs/dbt-core/issues/6259 is closed.
+        def default_profiles_dir():
+            from pathlib import Path
+
+            return Path.cwd() if (Path.cwd() / "profiles.yml").exists() else Path.home() / ".dbt"
+
         profile_name = project_profile_name
         if args_profile_name is not None:
             profile_name = args_profile_name
         if profile_name is None:
+            NO_SUPPLIED_PROFILE_ERROR = """\
+dbt cannot run because no profile was specified for this dbt project.
+To specify a profile for this project, add a line like the this to
+your dbt_project.yml file:
+
+profile: [profile name]
+
+Here, [profile name] should be replaced with a profile name
+defined in your profiles.yml file. You can find profiles.yml here:
+
+{profiles_file}/profiles.yml
+""".format(
+                profiles_file=default_profiles_dir()
+            )
             raise DbtProjectError(NO_SUPPLIED_PROFILE_ERROR)
         return profile_name

@@ -401,11 +408,13 @@ class Profile(HasCredentials):
         )

     @classmethod
-    def render_from_args(
+    def render(
         cls,
-        args: Any,
         renderer: ProfileRenderer,
         project_profile_name: Optional[str],
+        profile_name_override: Optional[str] = None,
+        target_override: Optional[str] = None,
+        threads_override: Optional[int] = None,
     ) -> "Profile":
         """Given the raw profiles as read from disk and the name of the desired
         profile if specified, return the profile component of the runtime
@@ -421,10 +430,9 @@ class Profile(HasCredentials):
         target could not be found.
         :returns Profile: The new Profile object.
         """
-        threads_override = getattr(args, "threads", None)
-        target_override = getattr(args, "target", None)
+        flags = get_flags()
         raw_profiles = read_profile(flags.PROFILES_DIR)
-        profile_name = cls.pick_profile_name(getattr(args, "profile", None), project_profile_name)
+        profile_name = cls.pick_profile_name(profile_name_override, project_profile_name)
         return cls.from_raw_profiles(
             raw_profiles=raw_profiles,
             profile_name=profile_name,
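
With render_from_args gone, overrides reach Profile.render as explicit keyword arguments, and the profiles directory comes from get_flags(). A hedged sketch of a call site (the renderer construction and names here are illustrative, not confirmed API):

from dbt.config.profile import Profile
from dbt.config.renderer import ProfileRenderer

renderer = ProfileRenderer({})  # cli vars; exact constructor args may differ
profile = Profile.render(
    renderer,
    project_profile_name="my_profile",  # the profile named in dbt_project.yml
    profile_name_override=None,         # --profile
    target_override=None,               # --target
    threads_override=4,                 # --threads
)
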
@@ -12,10 +12,10 @@ from typing import (
|
|||||||
)
|
)
|
||||||
from typing_extensions import Protocol, runtime_checkable
|
from typing_extensions import Protocol, runtime_checkable
|
||||||
|
|
||||||
import hashlib
|
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from dbt import flags, deprecations
|
from dbt.flags import get_flags
|
||||||
|
from dbt import deprecations
|
||||||
from dbt.clients.system import path_exists, resolve_path_from_base, load_file_contents
|
from dbt.clients.system import path_exists, resolve_path_from_base, load_file_contents
|
||||||
from dbt.clients.yaml_helper import load_yaml_text
|
from dbt.clients.yaml_helper import load_yaml_text
|
||||||
from dbt.contracts.connection import QueryComment
|
from dbt.contracts.connection import QueryComment
|
||||||
@@ -30,16 +30,16 @@ from dbt.graph import SelectionSpec
|
|||||||
from dbt.helper_types import NoValue
|
from dbt.helper_types import NoValue
|
||||||
from dbt.semver import VersionSpecifier, versions_compatible
|
from dbt.semver import VersionSpecifier, versions_compatible
|
||||||
from dbt.version import get_installed_version
|
from dbt.version import get_installed_version
|
||||||
from dbt.utils import MultiDict
|
from dbt.utils import MultiDict, md5
|
||||||
from dbt.node_types import NodeType
|
from dbt.node_types import NodeType
|
||||||
from dbt.config.selectors import SelectorDict
|
from dbt.config.selectors import SelectorDict
|
||||||
from dbt.contracts.project import (
|
from dbt.contracts.project import (
|
||||||
Project as ProjectContract,
|
Project as ProjectContract,
|
||||||
SemverString,
|
SemverString,
|
||||||
)
|
)
|
||||||
from dbt.contracts.project import PackageConfig
|
from dbt.contracts.project import PackageConfig, ProjectPackageMetadata
|
||||||
from dbt.dataclass_schema import ValidationError
|
from dbt.dataclass_schema import ValidationError
|
||||||
from .renderer import DbtProjectYamlRenderer
|
from .renderer import DbtProjectYamlRenderer, PackageRenderer
|
||||||
from .selectors import (
|
from .selectors import (
|
||||||
selector_config_from_data,
|
selector_config_from_data,
|
||||||
selector_data_from_root,
|
selector_data_from_root,
|
||||||
@@ -75,6 +75,11 @@ Validator Error:
|
|||||||
{error}
|
{error}
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
MISSING_DBT_PROJECT_ERROR = """\
|
||||||
|
No dbt_project.yml found at expected path {path}
|
||||||
|
Verify that each entry within packages.yml (and their transitive dependencies) contains a file named dbt_project.yml
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
@runtime_checkable
|
@runtime_checkable
|
||||||
class IsFQNResource(Protocol):
|
class IsFQNResource(Protocol):
|
||||||
@@ -156,16 +161,14 @@ def value_or(value: Optional[T], default: T) -> T:
|
|||||||
return value
|
return value
|
||||||
|
|
||||||
|
|
||||||
def _raw_project_from(project_root: str) -> Dict[str, Any]:
|
def load_raw_project(project_root: str) -> Dict[str, Any]:
|
||||||
|
|
||||||
project_root = os.path.normpath(project_root)
|
project_root = os.path.normpath(project_root)
|
||||||
project_yaml_filepath = os.path.join(project_root, "dbt_project.yml")
|
project_yaml_filepath = os.path.join(project_root, "dbt_project.yml")
|
||||||
|
|
||||||
# get the project.yml contents
|
# get the project.yml contents
|
||||||
if not path_exists(project_yaml_filepath):
|
if not path_exists(project_yaml_filepath):
|
||||||
raise DbtProjectError(
|
raise DbtProjectError(MISSING_DBT_PROJECT_ERROR.format(path=project_yaml_filepath))
|
||||||
"no dbt_project.yml found at expected path {}".format(project_yaml_filepath)
|
|
||||||
)
|
|
||||||
|
|
||||||
project_dict = _load_yaml(project_yaml_filepath)
|
project_dict = _load_yaml(project_yaml_filepath)
|
||||||
|
|
||||||
@@ -289,6 +292,13 @@ class PartialProject(RenderComponents):
             exc.path = os.path.join(self.project_root, "dbt_project.yml")
             raise
 
+    def render_package_metadata(self, renderer: PackageRenderer) -> ProjectPackageMetadata:
+        packages_data = renderer.render_data(self.packages_dict)
+        packages_config = package_config_from_data(packages_data)
+        if not self.project_name:
+            raise DbtProjectError("Package dbt_project.yml must have a name!")
+        return ProjectPackageMetadata(self.project_name, packages_config.packages)
+
     def check_config_path(self, project_dict, deprecated_path, exp_path):
         if deprecated_path in project_dict:
             if exp_path in project_dict:
@@ -363,9 +373,13 @@ class PartialProject(RenderComponents):
 
         docs_paths: List[str] = value_or(cfg.docs_paths, all_source_paths)
         asset_paths: List[str] = value_or(cfg.asset_paths, [])
-        target_path: str = flag_or(flags.TARGET_PATH, cfg.target_path, "target")
+        flags = get_flags()
+
+        flag_target_path = str(flags.TARGET_PATH) if flags.TARGET_PATH else None
+        target_path: str = flag_or(flag_target_path, cfg.target_path, "target")
+        log_path: str = str(flags.LOG_PATH)
+
         clean_targets: List[str] = value_or(cfg.clean_targets, [target_path])
-        log_path: str = flag_or(flags.LOG_PATH, cfg.log_path, "logs")
         packages_install_path: str = value_or(cfg.packages_install_path, "dbt_packages")
         # in the default case we'll populate this once we know the adapter type
         # It would be nice to just pass along a Quoting here, but that would
@@ -485,7 +499,7 @@ class PartialProject(RenderComponents):
         cls, project_root: str, *, verify_version: bool = False
     ) -> "PartialProject":
         project_root = os.path.normpath(project_root)
-        project_dict = _raw_project_from(project_root)
+        project_dict = load_raw_project(project_root)
         config_version = project_dict.get("config-version", 1)
         if config_version != 2:
             raise DbtProjectError(
@@ -659,11 +673,11 @@ class Project:
         *,
         verify_version: bool = False,
     ) -> "Project":
-        partial = cls.partial_load(project_root, verify_version=verify_version)
+        partial = PartialProject.from_project_root(project_root, verify_version=verify_version)
         return partial.render(renderer)
 
     def hashed_name(self):
-        return hashlib.md5(self.project_name.encode("utf-8")).hexdigest()
+        return md5(self.project_name)
 
     def get_selector(self, name: str) -> Union[SelectionSpec, bool]:
         if name not in self.selectors:
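Several call sites in this diff swap inline hashlib usage for a shared md5 helper in dbt.utils. A sketch of what that helper plausibly is, given the code it replaces (assumption: it hexdigests the UTF-8 encoding of its argument):

    import hashlib

    def md5(string: str) -> str:
        return hashlib.md5(string.encode("utf-8")).hexdigest()

    md5("jaffle_shop")  # equivalent to the old hashlib.md5(...).hexdigest() call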
@@ -107,7 +107,7 @@ class DbtProjectYamlRenderer(BaseRenderer):
         if cli_vars is None:
             cli_vars = {}
         if profile:
-            self.ctx_obj = TargetContext(profile, cli_vars)
+            self.ctx_obj = TargetContext(profile.to_target_dict(), cli_vars)
         else:
             self.ctx_obj = BaseContext(cli_vars)  # type:ignore
         context = self.ctx_obj.to_dict()
@@ -1,7 +1,7 @@
 import itertools
 import os
 from copy import deepcopy
-from dataclasses import dataclass, field
+from dataclasses import dataclass
 from pathlib import Path
 from typing import (
     Any,
@@ -13,17 +13,18 @@ from typing import (
     Optional,
     Tuple,
     Type,
-    Union,
 )
 
-from dbt import flags
+from dbt.flags import get_flags
 from dbt.adapters.factory import get_include_paths, get_relation_class_by_name
-from dbt.config.profile import read_user_config
-from dbt.contracts.connection import AdapterRequiredConfig, Credentials
+from dbt.config.project import load_raw_project
+from dbt.contracts.connection import AdapterRequiredConfig, Credentials, HasCredentials
 from dbt.contracts.graph.manifest import ManifestMetadata
 from dbt.contracts.project import Configuration, UserConfig
 from dbt.contracts.relation import ComponentName
 from dbt.dataclass_schema import ValidationError
+from dbt.events.functions import warn_or_error
+from dbt.events.types import UnusedResourceConfigPath
 from dbt.exceptions import (
     ConfigContractBrokenError,
     DbtProjectError,
@@ -31,14 +32,46 @@ from dbt.exceptions import (
     DbtRuntimeError,
     UninstalledPackagesFoundError,
 )
-from dbt.events.functions import warn_or_error
-from dbt.events.types import UnusedResourceConfigPath
 from dbt.helper_types import DictDefaultEmptyStr, FQNPath, PathSet
 
 from .profile import Profile
-from .project import Project, PartialProject
+from .project import Project
 from .renderer import DbtProjectYamlRenderer, ProfileRenderer
-from .utils import parse_cli_vars
 
+
+def load_project(
+    project_root: str,
+    version_check: bool,
+    profile: HasCredentials,
+    cli_vars: Optional[Dict[str, Any]] = None,
+) -> Project:
+    # get the project with all of the provided information
+    project_renderer = DbtProjectYamlRenderer(profile, cli_vars)
+    project = Project.from_project_root(
+        project_root, project_renderer, verify_version=version_check
+    )
+
+    # Save env_vars encountered in rendering for partial parsing
+    project.project_env_vars = project_renderer.ctx_obj.env_vars
+    return project
+
+
+def load_profile(
+    project_root: str,
+    cli_vars: Dict[str, Any],
+    profile_name_override: Optional[str] = None,
+    target_override: Optional[str] = None,
+    threads_override: Optional[int] = None,
+) -> Profile:
+    raw_project = load_raw_project(project_root)
+    raw_profile_name = raw_project.get("profile")
+    profile_renderer = ProfileRenderer(cli_vars)
+    profile_name = profile_renderer.render_value(raw_profile_name)
+    profile = Profile.render(
+        profile_renderer, profile_name, profile_name_override, target_override, threads_override
+    )
+    # Save env_vars encountered in rendering for partial parsing
+    profile.profile_env_vars = profile_renderer.ctx_obj.env_vars
+    return profile
+
 
 def _project_quoting_dict(proj: Project, profile: Profile) -> Dict[ComponentName, bool]:
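Taken together, the two new top-level loaders replace the old collect_profile/collect_project pair. A hedged sketch of how they compose, mirroring what collect_parts does later in this diff (paths and vars below are hypothetical):

    from dbt.config.runtime import load_profile, load_project

    project_root = "/path/to/project"  # hypothetical
    cli_vars = {"env": "dev"}          # hypothetical

    profile = load_profile(project_root, cli_vars)
    project = load_project(project_root, version_check=True, profile=profile, cli_vars=cli_vars)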
@@ -62,6 +95,21 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
     def __post_init__(self):
         self.validate()
 
+    @classmethod
+    def get_profile(
+        cls,
+        project_root: str,
+        cli_vars: Dict[str, Any],
+        args: Any,
+    ) -> Profile:
+        return load_profile(
+            project_root,
+            cli_vars,
+            args.profile,
+            args.target,
+            args.threads,
+        )
+
     # Called by 'new_project' and 'from_args'
     @classmethod
     def from_parts(
@@ -84,7 +132,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
             .replace_dict(_project_quoting_dict(project, profile))
         ).to_dict(omit_none=True)
 
-        cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))
+        cli_vars: Dict[str, Any] = getattr(args, "vars", {})
 
         return cls(
             project_name=project.project_name,
@@ -149,11 +197,10 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
 
         # load the new project and its packages. Don't pass cli variables.
         renderer = DbtProjectYamlRenderer(profile)
-
         project = Project.from_project_root(
             project_root,
             renderer,
-            verify_version=bool(flags.VERSION_CHECK),
+            verify_version=bool(getattr(self.args, "VERSION_CHECK", True)),
         )
 
         runtime_config = self.from_parts(
@@ -189,64 +236,19 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
         except ValidationError as e:
             raise ConfigContractBrokenError(e) from e
 
-    @classmethod
-    def _get_rendered_profile(
-        cls,
-        args: Any,
-        profile_renderer: ProfileRenderer,
-        profile_name: Optional[str],
-    ) -> Profile:
-
-        return Profile.render_from_args(args, profile_renderer, profile_name)
-
     @classmethod
     def collect_parts(cls: Type["RuntimeConfig"], args: Any) -> Tuple[Project, Profile]:
-        cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))
-
-        profile = cls.collect_profile(args=args)
-        project_renderer = DbtProjectYamlRenderer(profile, cli_vars)
-        project = cls.collect_project(args=args, project_renderer=project_renderer)
-        assert type(project) is Project
-        return (project, profile)
-
-    @classmethod
-    def collect_profile(
-        cls: Type["RuntimeConfig"], args: Any, profile_name: Optional[str] = None
-    ) -> Profile:
-
-        cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))
-        profile_renderer = ProfileRenderer(cli_vars)
-
-        # build the profile using the base renderer and the one fact we know
-        if profile_name is None:
-            # Note: only the named profile section is rendered here. The rest of the
-            # profile is ignored.
-            partial = cls.collect_project(args)
-            assert type(partial) is PartialProject
-            profile_name = partial.render_profile_name(profile_renderer)
-
-        profile = cls._get_rendered_profile(args, profile_renderer, profile_name)
-        # Save env_vars encountered in rendering for partial parsing
-        profile.profile_env_vars = profile_renderer.ctx_obj.env_vars
-        return profile
-
-    @classmethod
-    def collect_project(
-        cls: Type["RuntimeConfig"],
-        args: Any,
-        project_renderer: Optional[DbtProjectYamlRenderer] = None,
-    ) -> Union[Project, PartialProject]:
-
+        # profile_name from the project
         project_root = args.project_dir if args.project_dir else os.getcwd()
-        version_check = bool(flags.VERSION_CHECK)
-        partial = Project.partial_load(project_root, verify_version=version_check)
-        if project_renderer is None:
-            return partial
-        else:
-            project = partial.render(project_renderer)
-            project.project_env_vars = project_renderer.ctx_obj.env_vars
-            return project
+        cli_vars: Dict[str, Any] = getattr(args, "vars", {})
+        profile = cls.get_profile(
+            project_root,
+            cli_vars,
+            args,
+        )
+        flags = get_flags()
+        project = load_project(project_root, bool(flags.VERSION_CHECK), profile, cli_vars)
+        return project, profile
 
     # Called in main.py, lib.py, task/base.py
     @classmethod
@@ -411,8 +413,8 @@ class UnsetCredentials(Credentials):
         return ()
 
 
-# This is used by UnsetProfileConfig, for commands which do
-# not require a profile, i.e. dbt deps and clean
+# This is used by commands which do not require
+# a profile, i.e. dbt deps and clean
 class UnsetProfile(Profile):
     def __init__(self):
         self.credentials = UnsetCredentials()
@@ -431,182 +433,12 @@ class UnsetProfile(Profile):
         return Profile.__getattribute__(self, name)
 
 
-# This class is used by the dbt deps and clean commands, because they don't
-# require a functioning profile.
-@dataclass
-class UnsetProfileConfig(RuntimeConfig):
-    """This class acts a lot _like_ a RuntimeConfig, except if your profile is
-    missing, any access to profile members results in an exception.
-    """
-
-    profile_name: str = field(repr=False)
-    target_name: str = field(repr=False)
-
-    def __post_init__(self):
-        # instead of futzing with InitVar overrides or rewriting __init__, just
-        # `del` the attrs we don't want users touching.
-        del self.profile_name
-        del self.target_name
-        # don't call super().__post_init__(), as that calls validate(), and
-        # this object isn't very valid
-
-    def __getattribute__(self, name):
-        # Override __getattribute__ to check that the attribute isn't 'banned'.
-        if name in {"profile_name", "target_name"}:
-            raise DbtRuntimeError(f'Error: disallowed attribute "{name}" - no profile!')
-
-        # avoid every attribute access triggering infinite recursion
-        return RuntimeConfig.__getattribute__(self, name)
-
-    def to_target_dict(self):
-        # re-override the poisoned profile behavior
-        return DictDefaultEmptyStr({})
-
-    def to_project_config(self, with_packages=False):
-        """Return a dict representation of the config that could be written to
-        disk with `yaml.safe_dump` to get this configuration.
-
-        Overrides dbt.config.Project.to_project_config to omit undefined profile
-        attributes.
-
-        :param with_packages bool: If True, include the serialized packages
-            file in the root.
-        :returns dict: The serialized profile.
-        """
-        result = deepcopy(
-            {
-                "name": self.project_name,
-                "version": self.version,
-                "project-root": self.project_root,
-                "profile": "",
-                "model-paths": self.model_paths,
-                "macro-paths": self.macro_paths,
-                "seed-paths": self.seed_paths,
-                "test-paths": self.test_paths,
-                "analysis-paths": self.analysis_paths,
-                "docs-paths": self.docs_paths,
-                "asset-paths": self.asset_paths,
-                "target-path": self.target_path,
-                "snapshot-paths": self.snapshot_paths,
-                "clean-targets": self.clean_targets,
-                "log-path": self.log_path,
-                "quoting": self.quoting,
-                "models": self.models,
-                "on-run-start": self.on_run_start,
-                "on-run-end": self.on_run_end,
-                "dispatch": self.dispatch,
-                "seeds": self.seeds,
-                "snapshots": self.snapshots,
-                "sources": self.sources,
-                "tests": self.tests,
-                "metrics": self.metrics,
-                "exposures": self.exposures,
-                "vars": self.vars.to_dict(),
-                "require-dbt-version": [v.to_version_string() for v in self.dbt_version],
-                "config-version": self.config_version,
-            }
-        )
-        if self.query_comment:
-            result["query-comment"] = self.query_comment.to_dict(omit_none=True)
-
-        if with_packages:
-            result.update(self.packages.to_dict(omit_none=True))
-
-        return result
-
-    @classmethod
-    def from_parts(
-        cls,
-        project: Project,
-        profile: Profile,
-        args: Any,
-        dependencies: Optional[Mapping[str, "RuntimeConfig"]] = None,
-    ) -> "RuntimeConfig":
-        """Instantiate a RuntimeConfig from its components.
-
-        :param profile: Ignored.
-        :param project: A parsed dbt Project.
-        :param args: The parsed command-line arguments.
-        :returns RuntimeConfig: The new configuration.
-        """
-        cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))
-
-        return cls(
-            project_name=project.project_name,
-            version=project.version,
-            project_root=project.project_root,
-            model_paths=project.model_paths,
-            macro_paths=project.macro_paths,
-            seed_paths=project.seed_paths,
-            test_paths=project.test_paths,
-            analysis_paths=project.analysis_paths,
-            docs_paths=project.docs_paths,
-            asset_paths=project.asset_paths,
-            target_path=project.target_path,
-            snapshot_paths=project.snapshot_paths,
-            clean_targets=project.clean_targets,
-            log_path=project.log_path,
-            packages_install_path=project.packages_install_path,
-            quoting=project.quoting,  # we never use this anyway.
-            models=project.models,
-            on_run_start=project.on_run_start,
-            on_run_end=project.on_run_end,
-            dispatch=project.dispatch,
-            seeds=project.seeds,
-            snapshots=project.snapshots,
-            dbt_version=project.dbt_version,
-            packages=project.packages,
-            manifest_selectors=project.manifest_selectors,
-            selectors=project.selectors,
-            query_comment=project.query_comment,
-            sources=project.sources,
-            tests=project.tests,
-            metrics=project.metrics,
-            exposures=project.exposures,
-            vars=project.vars,
-            config_version=project.config_version,
-            unrendered=project.unrendered,
-            project_env_vars=project.project_env_vars,
-            profile_env_vars=profile.profile_env_vars,
-            profile_name="",
-            target_name="",
-            user_config=UserConfig(),
-            threads=getattr(args, "threads", 1),
-            credentials=UnsetCredentials(),
-            args=args,
-            cli_vars=cli_vars,
-            dependencies=dependencies,
-        )
-
-    @classmethod
-    def _get_rendered_profile(
-        cls,
-        args: Any,
-        profile_renderer: ProfileRenderer,
-        profile_name: Optional[str],
-    ) -> Profile:
-
-        profile = UnsetProfile()
-        # The profile (for warehouse connection) is not needed, but we want
-        # to get the UserConfig, which is also in profiles.yml
-        user_config = read_user_config(flags.PROFILES_DIR)
-        profile.user_config = user_config
-        return profile
-
-    @classmethod
-    def from_args(cls: Type[RuntimeConfig], args: Any) -> "RuntimeConfig":
-        """Given arguments, read in dbt_project.yml from the current directory,
-        read in packages.yml if it exists, and use them to find the profile to
-        load.
-
-        :param args: The arguments as parsed from the cli.
-        :raises DbtProjectError: If the project is invalid or missing.
-        :raises DbtProfileError: If the profile is invalid or missing.
-        :raises DbtValidationError: If the cli variables are invalid.
-        """
-        project, profile = cls.collect_parts(args)
-
-        return cls.from_parts(project=project, profile=profile, args=args)
+UNUSED_RESOURCE_CONFIGURATION_PATH_MESSAGE = """\
+Configuration paths exist in your dbt_project.yml file which do not \
+apply to any resources.
+There are {} unused configuration paths:
+{}
+"""
 
 
 def _is_config_used(path, fqns):
@@ -1,12 +1,7 @@
-from argparse import Namespace
-from typing import Any, Dict, Optional, Union
-from xmlrpc.client import Boolean
-from dbt.contracts.project import UserConfig
+from typing import Any, Dict
 
-import dbt.flags as flags
 from dbt.clients import yaml_helper
-from dbt.config import Profile, Project, read_user_config
-from dbt.config.renderer import DbtProjectYamlRenderer, ProfileRenderer
 from dbt.events.functions import fire_event
 from dbt.events.types import InvalidOptionYAML
 from dbt.exceptions import DbtValidationError, OptionNotYamlDictError
@@ -27,49 +22,3 @@ def parse_cli_yaml_string(var_string: str, cli_option_name: str) -> Dict[str, Any]:
     except DbtValidationError:
         fire_event(InvalidOptionYAML(option_name=cli_option_name))
         raise
-
-
-def get_project_config(
-    project_path: str,
-    profile_name: str,
-    args: Namespace = Namespace(),
-    cli_vars: Optional[Dict[str, Any]] = None,
-    profile: Optional[Profile] = None,
-    user_config: Optional[UserConfig] = None,
-    return_dict: Boolean = True,
-) -> Union[Project, Dict]:
-    """Returns a project config (dict or object) from a given project path and profile name.
-
-    Args:
-        project_path: Path to project
-        profile_name: Name of profile
-        args: An argparse.Namespace that represents what would have been passed in on the
-            command line (optional)
-        cli_vars: A dict of any vars that would have been passed in on the command line (optional)
-            (see parse_cli_vars above for formatting details)
-        profile: A dbt.config.profile.Profile object (optional)
-        user_config: A dbt.contracts.project.UserConfig object (optional)
-        return_dict: Return a dict if true, return the full dbt.config.project.Project object if false
-
-    Returns:
-        A full project config
-
-    """
-    # Generate a profile if not provided
-    if profile is None:
-        # Generate user_config if not provided
-        if user_config is None:
-            user_config = read_user_config(flags.PROFILES_DIR)
-        # Update flags
-        flags.set_from_args(args, user_config)
-        if cli_vars is None:
-            cli_vars = {}
-        profile = Profile.render_from_args(args, ProfileRenderer(cli_vars), profile_name)
-    # Generate a project
-    project = Project.from_project_root(
-        project_path,
-        DbtProjectYamlRenderer(profile),
-        verify_version=bool(flags.VERSION_CHECK),
-    )
-    # Return
-    return project.to_project_config() if return_dict else project
@@ -2,7 +2,8 @@ import json
 import os
 from typing import Any, Dict, NoReturn, Optional, Mapping, Iterable, Set, List
 
-from dbt import flags
+from dbt.flags import get_flags
+import dbt.flags as flags_module
 from dbt import tracking
 from dbt import utils
 from dbt.clients.jinja import get_rendered
@@ -635,7 +636,7 @@ class BaseContext(metaclass=ContextMeta):
 
         This supports all flags defined in flags submodule (core/dbt/flags.py)
         """
-        return flags.get_flag_obj()
+        return flags_module.get_flag_obj()
 
     @contextmember
     @staticmethod
@@ -651,7 +652,7 @@ class BaseContext(metaclass=ContextMeta):
         {% endmacro %}"
         """
 
-        if not flags.NO_PRINT:
+        if not get_flags().PRINT:
            print(msg)
         return ""
 
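The pattern change here recurs throughout the diff: flags are no longer read as attributes of the dbt.flags module at import time, but fetched through get_flags() at the call site. A minimal sketch of the new idiom (the flag name below is one that appears elsewhere in this diff):

    from dbt.flags import get_flags

    flags = get_flags()
    if flags.SEND_ANONYMOUS_USAGE_STATS:
        ...  # tracking enabled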
@@ -16,7 +16,8 @@ class ConfiguredContext(TargetContext):
     config: AdapterRequiredConfig
 
     def __init__(self, config: AdapterRequiredConfig) -> None:
-        super().__init__(config, config.cli_vars)
+        super().__init__(config.to_target_dict(), config.cli_vars)
+        self.config = config
 
     @contextproperty
     def project_name(self) -> str:
@@ -1,15 +1,13 @@
 from typing import Any, Dict
 
-from dbt.contracts.connection import HasCredentials
-
 from dbt.context.base import BaseContext, contextproperty
 
 
 class TargetContext(BaseContext):
     # subclass is ConfiguredContext
-    def __init__(self, config: HasCredentials, cli_vars: Dict[str, Any]):
+    def __init__(self, target_dict: Dict[str, Any], cli_vars: Dict[str, Any]):
         super().__init__(cli_vars=cli_vars)
-        self.config = config
+        self.target_dict = target_dict
 
     @contextproperty
     def target(self) -> Dict[str, Any]:
@@ -73,9 +71,4 @@ class TargetContext(BaseContext):
     |----------|-----------|------------------------------------------|
 
     """
-        return self.config.to_target_dict()
-
-
-def generate_target_context(config: HasCredentials, cli_vars: Dict[str, Any]) -> Dict[str, Any]:
-    ctx = TargetContext(config, cli_vars)
-    return ctx.to_dict()
+        return self.target_dict
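With this change, TargetContext is decoupled from profile objects and the generate_target_context helper goes away; callers pass the already-serialized target dictionary. A hedged usage sketch (the profile object below is hypothetical):

    from dbt.context.target import TargetContext

    target_dict = profile.to_target_dict()  # e.g. {"name": "dev", "type": "postgres", ...}
    ctx = TargetContext(target_dict, cli_vars={})
    jinja_context = ctx.to_dict()  # supplies {{ target }} during rendering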
@@ -1,6 +1,5 @@
 import abc
 import itertools
-import hashlib
 from dataclasses import dataclass, field
 from typing import (
     Any,
@@ -13,7 +12,7 @@ from typing import (
     Callable,
 )
 from dbt.exceptions import DbtInternalError
-from dbt.utils import translate_aliases
+from dbt.utils import translate_aliases, md5
 from dbt.events.functions import fire_event
 from dbt.events.types import NewConnectionOpening
 from dbt.events.contextvars import get_node_info
@@ -142,7 +141,7 @@ class Credentials(ExtensibleDbtClassMixin, Replaceable, metaclass=abc.ABCMeta):
         raise NotImplementedError("unique_field not implemented for base credentials class")
 
     def hashed_unique_field(self) -> str:
-        return hashlib.md5(self.unique_field.encode("utf-8")).hexdigest()
+        return md5(self.unique_field)
 
     def connection_info(self, *, with_aliases: bool = False) -> Iterable[Tuple[str, Any]]:
         """Return an ordered iterator of key/value pairs for pretty-printing."""
@@ -227,6 +227,7 @@ class SchemaSourceFile(BaseSourceFile):
     sources: List[str] = field(default_factory=list)
     exposures: List[str] = field(default_factory=list)
     metrics: List[str] = field(default_factory=list)
+    groups: List[str] = field(default_factory=list)
     # node patches contain models, seeds, snapshots, analyses
     ndp: List[str] = field(default_factory=list)
     # any macro patches in this file by macro unique_id.
@@ -29,6 +29,7 @@ from dbt.contracts.graph.nodes import (
     GenericTestNode,
     Exposure,
     Metric,
+    Group,
     UnpatchedSourceDefinition,
     ManifestNode,
     GraphMemberNode,
@@ -49,7 +50,7 @@ from dbt.helper_types import PathSet
 from dbt.events.functions import fire_event
 from dbt.events.types import MergedFromState
 from dbt.node_types import NodeType
-from dbt import flags
+from dbt.flags import get_flags, MP_CONTEXT
 from dbt import tracking
 import dbt.utils
 
@@ -303,7 +304,7 @@ class ManifestMetadata(BaseArtifactMetadata):
             self.user_id = tracking.active_user.id
 
         if self.send_anonymous_usage_stats is None:
-            self.send_anonymous_usage_stats = flags.SEND_ANONYMOUS_USAGE_STATS
+            self.send_anonymous_usage_stats = get_flags().SEND_ANONYMOUS_USAGE_STATS
 
     @classmethod
     def default(cls):
@@ -599,6 +600,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
     docs: MutableMapping[str, Documentation] = field(default_factory=dict)
     exposures: MutableMapping[str, Exposure] = field(default_factory=dict)
     metrics: MutableMapping[str, Metric] = field(default_factory=dict)
+    groups: MutableMapping[str, Group] = field(default_factory=dict)
     selectors: MutableMapping[str, Any] = field(default_factory=dict)
     files: MutableMapping[str, AnySourceFile] = field(default_factory=dict)
     metadata: ManifestMetadata = field(default_factory=ManifestMetadata)
@@ -631,7 +633,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
         metadata={"serialize": lambda x: None, "deserialize": lambda x: None},
     )
     _lock: Lock = field(
-        default_factory=flags.MP_CONTEXT.Lock,
+        default_factory=MP_CONTEXT.Lock,
         metadata={"serialize": lambda x: None, "deserialize": lambda x: None},
     )
 
@@ -643,7 +645,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
 
     @classmethod
     def __post_deserialize__(cls, obj):
-        obj._lock = flags.MP_CONTEXT.Lock()
+        obj._lock = MP_CONTEXT.Lock()
         return obj
 
     def sync_update_node(self, new_node: ManifestNode) -> ManifestNode:
@@ -684,6 +686,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
         """
         self.flat_graph = {
             "exposures": {k: v.to_dict(omit_none=False) for k, v in self.exposures.items()},
+            "groups": {k: v.to_dict(omit_none=False) for k, v in self.groups.items()},
             "metrics": {k: v.to_dict(omit_none=False) for k, v in self.metrics.items()},
             "nodes": {k: v.to_dict(omit_none=False) for k, v in self.nodes.items()},
             "sources": {k: v.to_dict(omit_none=False) for k, v in self.sources.items()},
@@ -775,6 +778,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
             docs={k: _deepcopy(v) for k, v in self.docs.items()},
             exposures={k: _deepcopy(v) for k, v in self.exposures.items()},
             metrics={k: _deepcopy(v) for k, v in self.metrics.items()},
+            groups={k: _deepcopy(v) for k, v in self.groups.items()},
             selectors={k: _deepcopy(v) for k, v in self.selectors.items()},
             metadata=self.metadata,
             disabled={k: _deepcopy(v) for k, v in self.disabled.items()},
@@ -816,6 +820,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
             docs=self.docs,
             exposures=self.exposures,
             metrics=self.metrics,
+            groups=self.groups,
             selectors=self.selectors,
             metadata=self.metadata,
             disabled=self.disabled,
@@ -1070,6 +1075,8 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
                 source_file.metrics.append(node.unique_id)
             if isinstance(node, Exposure):
                 source_file.exposures.append(node.unique_id)
+            if isinstance(node, Group):
+                source_file.groups.append(node.unique_id)
         else:
             source_file.nodes.append(node.unique_id)
 
@@ -1083,6 +1090,11 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
         self.metrics[metric.unique_id] = metric
         source_file.metrics.append(metric.unique_id)
 
+    def add_group(self, source_file: SchemaSourceFile, group: Group):
+        _check_duplicates(group, self.groups)
+        self.groups[group.unique_id] = group
+        source_file.groups.append(group.unique_id)
+
     def add_disabled_nofile(self, node: GraphMemberNode):
         # There can be multiple disabled nodes for the same unique_id
         if node.unique_id in self.disabled:
@@ -1125,6 +1137,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
             self.docs,
             self.exposures,
             self.metrics,
+            self.groups,
             self.selectors,
             self.files,
             self.metadata,
@@ -1178,10 +1191,13 @@ class WritableManifest(ArtifactMixin):
     metrics: Mapping[UniqueID, Metric] = field(
         metadata=dict(description=("The metrics defined in the dbt project and its dependencies"))
     )
+    groups: Mapping[UniqueID, Group] = field(
+        metadata=dict(description=("The groups defined in the dbt project"))
+    )
     selectors: Mapping[UniqueID, Any] = field(
         metadata=dict(description=("The selectors defined in selectors.yml"))
     )
-    disabled: Optional[Mapping[UniqueID, List[ResultNode]]] = field(
+    disabled: Optional[Mapping[UniqueID, List[GraphMemberNode]]] = field(
         metadata=dict(description="A mapping of the disabled nodes in the target")
     )
     parent_map: Optional[NodeEdgeMap] = field(
@@ -446,6 +446,7 @@ class NodeConfig(NodeAndTestConfig):
         default_factory=Docs,
         metadata=MergeBehavior.Update.meta(),
     )
+    constraints_enabled: Optional[bool] = False
 
     # we validate that node_color has a suitable value to prevent dbt-docs from crashing
     def __post_init__(self):
@@ -28,7 +28,7 @@ from dbt.contracts.graph.unparsed import (
     UnparsedSourceTableDefinition,
     UnparsedColumn,
     TestDef,
-    ExposureOwner,
+    Owner,
     ExposureType,
     MaturityType,
     MetricFilter,
@@ -37,6 +37,7 @@ from dbt.contracts.graph.unparsed import (
 from dbt.contracts.util import Replaceable, AdditionalPropertiesMixin
 from dbt.events.proto_types import NodeInfo
 from dbt.events.functions import warn_or_error
+from dbt.exceptions import ParsingError
 from dbt.events.types import (
     SeedIncreased,
     SeedExceedsLimitSamePath,
@@ -44,7 +45,7 @@ from dbt.events.types import (
     SeedExceedsLimitChecksumChanged,
 )
 from dbt.events.contextvars import set_contextvars
-from dbt import flags
+from dbt.flags import get_flags
 from dbt.node_types import ModelLanguage, NodeType
 from dbt.utils import cast_dict_to_dict_of_strings
 
@@ -60,6 +61,7 @@ from .model_config import (
     SnapshotConfig,
 )
 
+
 # =====================================================================
 # This contains the classes for all of the nodes and node-like objects
 # in the manifest. In the "nodes" dictionary of the manifest we find
@@ -145,6 +147,8 @@ class ColumnInfo(AdditionalPropertiesMixin, ExtensibleDbtClassMixin, Replaceable):
     description: str = ""
     meta: Dict[str, Any] = field(default_factory=dict)
     data_type: Optional[str] = None
+    constraints: Optional[List[str]] = None
+    constraints_check: Optional[str] = None
     quote: Optional[bool] = None
     tags: List[str] = field(default_factory=list)
     _extra: Dict[str, Any] = field(default_factory=dict)
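ColumnInfo can now carry column-level constraint metadata. A hypothetical payload showing the two new fields (values are illustrative, not from the diff):

    column = ColumnInfo(
        name="id",
        data_type="int",
        constraints=["not null", "primary key"],  # new field
        constraints_check="id > 0",               # new field
    )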
@@ -399,6 +403,7 @@ class CompiledNode(ParsedNode):
     extra_ctes_injected: bool = False
     extra_ctes: List[InjectedCTE] = field(default_factory=list)
     _pre_injected_sql: Optional[str] = None
+    constraints_enabled: bool = False
 
     @property
     def empty(self):
@@ -482,6 +487,7 @@ class SeedNode(ParsedNode):  # No SQLDefaults!
     # seeds need the root_path because the contents are not loaded initially
     # and we need the root_path to load the seed later
     root_path: Optional[str] = None
+    depends_on: MacroDependsOn = field(default_factory=MacroDependsOn)
 
     def same_seeds(self, other: "SeedNode") -> bool:
         # for seeds, we check the hashes. If the hashes are different types,
@@ -523,6 +529,39 @@ class SeedNode(ParsedNode):  # No SQLDefaults!
         """Seeds are never empty"""
         return False
 
+    def _disallow_implicit_dependencies(self):
+        """Disallow seeds to take implicit upstream dependencies via pre/post hooks"""
+        # Seeds are root nodes in the DAG. They cannot depend on other nodes.
+        # However, it's possible to define pre- and post-hooks on seeds, and for those
+        # hooks to include {{ ref(...) }}. This worked in previous versions, but it
+        # was never officially documented or supported behavior. Let's raise an explicit error,
+        # which will surface during parsing if the user has written code such that we attempt
+        # to capture & record a ref/source/metric call on the SeedNode.
+        # For more details: https://github.com/dbt-labs/dbt-core/issues/6806
+        hooks = [f'- pre_hook: "{hook.sql}"' for hook in self.config.pre_hook] + [
+            f'- post_hook: "{hook.sql}"' for hook in self.config.post_hook
+        ]
+        hook_list = "\n".join(hooks)
+        message = f"""
+Seeds cannot depend on other nodes. dbt detected a seed with a pre- or post-hook
+that calls 'ref', 'source', or 'metric', either directly or indirectly via other macros.
+
+Error raised for '{self.unique_id}', which has these hooks defined: \n{hook_list}
+"""
+        raise ParsingError(message)
+
+    @property
+    def refs(self):
+        self._disallow_implicit_dependencies()
+
+    @property
+    def sources(self):
+        self._disallow_implicit_dependencies()
+
+    @property
+    def metrics(self):
+        self._disallow_implicit_dependencies()
+
     def same_body(self, other) -> bool:
         return self.same_seeds(other)
 
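A quick illustration of the new guard (seed_node below is a hypothetical SeedNode whose pre_hook contains a ref() call):

    from dbt.exceptions import ParsingError

    try:
        seed_node.refs  # previously returned [], now raises at parse time
    except ParsingError as exc:
        print(exc)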
@@ -531,8 +570,8 @@ class SeedNode(ParsedNode):  # No SQLDefaults!
         return []
 
     @property
-    def depends_on_macros(self):
-        return []
+    def depends_on_macros(self) -> List[str]:
+        return self.depends_on.macros
 
     @property
     def extra_ctes(self):
@@ -557,7 +596,7 @@ class TestShouldStoreFailures:
     def should_store_failures(self):
         if self.config.store_failures:
             return self.config.store_failures
-        return flags.STORE_FAILURES
+        return get_flags().STORE_FAILURES
 
     @property
     def is_relational(self):
@@ -892,7 +931,7 @@ class SourceDefinition(NodeInfoMixin, ParsedSourceMandatory):
 @dataclass
 class Exposure(GraphNode):
     type: ExposureType
-    owner: ExposureOwner
+    owner: Owner
     resource_type: NodeType = field(metadata={"restrict": [NodeType.Exposure]})
     description: str = ""
     label: Optional[str] = None
@@ -1065,6 +1104,18 @@ class Metric(GraphNode):
     )
 
 
+# ====================================
+# Group node
+# ====================================
+
+
+@dataclass
+class Group(BaseNode):
+    name: str
+    owner: Owner
+    resource_type: NodeType = field(metadata={"restrict": [NodeType.Group]})
+
+
 # ====================================
 # Patches
 # ====================================
@@ -1133,6 +1184,7 @@ Resource = Union[
     GraphMemberNode,
     Documentation,
     Macro,
+    Group,
 ]
 
 TestNode = Union[
@@ -93,6 +93,8 @@ class HasDocs(AdditionalPropertiesMixin, ExtensibleDbtClassMixin, Replaceable):
     description: str = ""
     meta: Dict[str, Any] = field(default_factory=dict)
     data_type: Optional[str] = None
+    constraints: Optional[List[str]] = None
+    constraints_check: Optional[str] = None
     docs: Docs = field(default_factory=Docs)
     _extra: Dict[str, Any] = field(default_factory=dict)
 
@@ -424,8 +426,8 @@ class MaturityType(StrEnum):
 
 
 @dataclass
-class ExposureOwner(dbtClassMixin, Replaceable):
-    email: str
+class Owner(AdditionalPropertiesAllowed, Replaceable):
+    email: Optional[str] = None
     name: Optional[str] = None
 
 
@@ -433,7 +435,7 @@ class ExposureOwner(dbtClassMixin, Replaceable):
 class UnparsedExposure(dbtClassMixin, Replaceable):
     name: str
     type: ExposureType
-    owner: ExposureOwner
+    owner: Owner
     description: str = ""
     label: Optional[str] = None
     maturity: Optional[MaturityType] = None
@@ -451,6 +453,9 @@ class UnparsedExposure(dbtClassMixin, Replaceable):
         if not (re.match(r"[\w-]+$", data["name"])):
             deprecations.warn("exposure-name", exposure=data["name"])
 
+        if data["owner"].get("name") is None and data["owner"].get("email") is None:
+            raise ValidationError("Exposure owner must have at least one of 'name' or 'email'.")
+
 
 @dataclass
 class MetricFilter(dbtClassMixin, Replaceable):
@@ -533,3 +538,15 @@ class UnparsedMetric(dbtClassMixin, Replaceable):
 
         if data.get("model") is not None and data.get("calculation_method") == "derived":
             raise ValidationError("Derived metrics cannot have a 'model' property")
+
+
+@dataclass
+class UnparsedGroup(dbtClassMixin, Replaceable):
+    name: str
+    owner: Owner
+
+    @classmethod
+    def validate(cls, data):
+        super(UnparsedGroup, cls).validate(data)
+        if data["owner"].get("name") is None and data["owner"].get("email") is None:
+            raise ValidationError("Group owner must have at least one of 'name' or 'email'.")
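Owner is now shared by exposures and groups, with email made optional; the trade-off is that "at least one of name/email" must be enforced explicitly in validate. A hedged sketch of the new behavior (data values are hypothetical):

    from dbt.contracts.graph.unparsed import UnparsedGroup

    UnparsedGroup.validate({"name": "finance", "owner": {"name": "Jane Doe"}})  # passes
    UnparsedGroup.validate({"name": "finance", "owner": {}})  # raises ValidationError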
@@ -13,7 +13,7 @@ from dbt.events.types import TimingInfoCollected
 from dbt.events.proto_types import RunResultMsg, TimingInfoMsg
 from dbt.events.contextvars import get_node_info
 from dbt.logger import TimingProcessor
-from dbt.utils import lowercase, cast_to_str, cast_to_int
+from dbt.utils import lowercase, cast_to_str, cast_to_int, cast_dict_to_dict_of_strings
 from dbt.dataclass_schema import dbtClassMixin, StrEnum
 
 import agate
@@ -130,7 +130,6 @@ class BaseResult(dbtClassMixin):
         return data
 
     def to_msg(self):
-        # TODO: add more fields
         msg = RunResultMsg()
         msg.status = str(self.status)
         msg.message = cast_to_str(self.message)
@@ -138,7 +137,7 @@ class BaseResult(dbtClassMixin):
         msg.execution_time = self.execution_time
         msg.num_failures = cast_to_int(self.failures)
         msg.timing_info = [ti.to_msg() for ti in self.timing]
-        # adapter_response
+        msg.adapter_response = cast_dict_to_dict_of_strings(self.adapter_response)
         return msg
 
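The proto schema's adapter_response field is a map of strings, so the adapter response dict is stringified before assignment. A sketch of what cast_dict_to_dict_of_strings plausibly does, inferred from its name and this call site (an assumption, not the verified dbt.utils implementation):

    def cast_dict_to_dict_of_strings(dct):
        return {str(k): str(v) for k, v in dct.items()}

    cast_dict_to_dict_of_strings({"rows_affected": 12, "code": "SELECT"})
    # -> {"rows_affected": "12", "code": "SELECT"}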
@@ -250,7 +250,6 @@ def upgrade_seed_content(node_content):
         "refs",
         "sources",
         "metrics",
-        "depends_on",
         "compiled_path",
         "compiled",
         "compiled_code",
@@ -260,6 +259,8 @@ def upgrade_seed_content(node_content):
     ):
         if attr_name in node_content:
             del node_content[attr_name]
+    # In v1.4, we switched SeedNode.depends_on from DependsOn to MacroDependsOn
+    node_content.get("depends_on", {}).pop("nodes", None)
 
 
 def upgrade_manifest_json(manifest: dict) -> dict:
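Instead of dropping the whole depends_on key, the upgrade now trims the old DependsOn shape down to MacroDependsOn by removing only "nodes". A minimal illustration with hypothetical content:

    node_content = {"depends_on": {"macros": [], "nodes": ["model.jaffle_shop.orders"]}}
    node_content.get("depends_on", {}).pop("nodes", None)
    assert node_content == {"depends_on": {"macros": []}}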
@@ -274,6 +275,9 @@ def upgrade_manifest_json(manifest: dict) -> dict:
         upgrade_node_content(node_content)
         if node_content["resource_type"] == "seed":
             upgrade_seed_content(node_content)
+    # add group key
+    if "groups" not in manifest:
+        manifest["groups"] = {}
     for metric_content in manifest.get("metrics", {}).values():
         # handle attr renames + value translation ("expression" -> "derived")
         metric_content = rename_metric_attr(metric_content)
@@ -283,7 +287,7 @@ def upgrade_manifest_json(manifest: dict) -> dict:
         if "root_path" in exposure_content:
             del exposure_content["root_path"]
     for source_content in manifest.get("sources", {}).values():
-        if "root_path" in exposure_content:
+        if "root_path" in source_content:
             del source_content["root_path"]
     for macro_content in manifest.get("macros", {}).values():
         if "root_path" in macro_content:
Some files were not shown because too many files have changed in this diff.