Mirror of https://github.com/dbt-labs/dbt-core (synced 2025-12-19 01:31:27 +00:00)

Compare commits: adding-sem ... callum_tes (186 commits)
| Author | SHA1 | Date |
|---|---|---|
| | ec1c9f0362 | |
| | caeecf4a67 | |
| | fcea5969ae | |
| | 672a7d7fc8 | |
| | a033aa0180 | |
| | 38991cd3a9 | |
| | 4f966b0d54 | |
| | d3eaa37212 | |
| | ce1759f793 | |
| | 4b3e797530 | |
| | 3e9ed1ff9b | |
| | 9b0dce32b8 | |
| | ce8e886f38 | |
| | 12d02dc9d1 | |
| | 0ef9931d19 | |
| | a2213abbc0 | |
| | 915585c36e | |
| | 72f59da9df | |
| | 0dd99eac83 | |
| | f0564f9d32 | |
| | 5ddd40885e | |
| | 58d1bccd26 | |
| | 70c26f5c74 | |
| | 278e4c7673 | |
| | ac962a4a31 | |
| | bb2d062cc5 | |
| | 7667784985 | |
| | e3ec07d035 | |
| | c7aa2ed7ef | |
| | 05ecfbcc3a | |
| | e06ae97068 | |
| | e49e259950 | |
| | ed50877c4f | |
| | 140597276b | |
| | 6712a5841a | |
| | 6b5e38ee28 | |
| | 63a1bf9adb | |
| | f4356d8dd2 | |
| | 7c715c5625 | |
| | 5b9a24fd23 | |
| | 6378c13e7a | |
| | 2c7238fbb4 | |
| | b1d597109f | |
| | 7617eece3a | |
| | 8ce92b56d7 | |
| | 21fae1c4a4 | |
| | 2db94c5788 | |
| | f25c8f39fc | |
| | 3b8b191623 | |
| | 246fd66e8e | |
| | c952d44ec5 | |
| | 817d39ac14 | |
| | 85e27ac747 | |
| | 971b38c26b | |
| | b5ca2e4c5f | |
| | b7884facbf | |
| | 57ce461067 | |
| | e69b465c41 | |
| | b1b830643e | |
| | 3cee9d16fa | |
| | c647706ac2 | |
| | 6937b321d6 | |
| | a6fc443abc | |
| | 7b33ffb1bd | |
| | 340cae3b43 | |
| | f38cbc4feb | |
| | 480e0e55c5 | |
| | e5c468bb93 | |
| | 605c72e86e | |
| | aad46ac5a8 | |
| | d85618ef26 | |
| | 1250f23c44 | |
| | daea7d59a7 | |
| | 4575757c2a | |
| | 91c5e2cc86 | |
| | d7a2f77705 | |
| | 4a4b89606b | |
| | 1ebe2e7118 | |
| | bafae0326b | |
| | 7e1b788bd8 | |
| | 1bd2fe09a1 | |
| | 5b0197635d | |
| | f1087e57bf | |
| | 250537ba58 | |
| | ccc7222868 | |
| | c1ad7b0f0e | |
| | 311a57a21e | |
| | b7c45de6b1 | |
| | c53c3cf181 | |
| | a77d325c8a | |
| | dd41384d82 | |
| | aa55fb2d30 | |
| | 864f4efb8b | |
| | 83c5a8c24b | |
| | 57aef33fb3 | |
| | 6d78e5e640 | |
| | f54a876f65 | |
| | 8bbae7926b | |
| | db2b12021e | |
| | 8b2c9bf39d | |
| | 298bf8a1d4 | |
| | 77748571b4 | |
| | 8ce4c289c5 | |
| | abbece8876 | |
| | 3ad40372e6 | |
| | c6d0e7c926 | |
| | bc015843d4 | |
| | df64511feb | |
| | db0981afe7 | |
| | dcf6544f93 | |
| | c2c8959fee | |
| | ccb4fa26cd | |
| | d0b5d752df | |
| | 4c63b630de | |
| | 9c0b62b4f5 | |
| | e08eede5e2 | |
| | 05e53d4143 | |
| | b2ea2b8b25 | |
| | 2245d8d710 | |
| | d9424cc710 | |
| | 0503c141b7 | |
| | 1a6e4a00c7 | |
| | 42b7caae19 | |
| | 622e5fd71d | |
| | 59d773ea7e | |
| | 84bf5b4620 | |
| | 726c4d6c58 | |
| | acc88d47a3 | |
| | 0a74594d09 | |
| | d2f3cdd6de | |
| | 92d1ef8482 | |
| | a8abc49632 | |
| | d6ac340df0 | |
| | c653330911 | |
| | 82d9b2fa87 | |
| | 3f96fad4f9 | |
| | c2c4757a2b | |
| | 08b2d94ccd | |
| | 7fa61f0816 | |
| | c65ba11ae6 | |
| | b0651b13b5 | |
| | a34521ec07 | |
| | da47b90503 | |
| | d27016a4e7 | |
| | db99e2f68d | |
| | cbb9117ab9 | |
| | e2ccf011d9 | |
| | 17014bfad3 | |
| | 92b7166c10 | |
| | 7b464b8a49 | |
| | 5c765bf3e2 | |
| | 93619a9a37 | |
| | a181cee6ae | |
| | a0ade13f5a | |
| | 9823a56e1d | |
| | 3aeab73740 | |
| | 9801eebc58 | |
| | 11c622230c | |
| | f0349488ed | |
| | c85be323f5 | |
| | 6954c4df1b | |
| | 30a1595f72 | |
| | f841a7ca76 | |
| | 07a004b301 | |
| | b05582de39 | |
| | fa7c4d19f0 | |
| | 1913eac5ed | |
| | 066346faa2 | |
| | 0a03355ceb | |
| | 53127daad8 | |
| | 91b20b7482 | |
| | 5b31cc4266 | |
| | 9bb1250869 | |
| | cc5a38ec5a | |
| | b0909b8f5d | |
| | 5d278dacf1 | |
| | ce1aaec31d | |
| | 1809852a0d | |
| | 88d2ee4813 | |
| | 77be2e4fdf | |
| | e91863de59 | |
| | 44b457c191 | |
| | a0ec0b6f9d | |
| | 1ec54abdc4 | |
| | 5efc4aa066 | |
| | 847c0b9644 | |
.bumpversion.cfg

```diff
@@ -1,13 +1,19 @@
 [bumpversion]
-current_version = 1.5.0a1
-parse = (?P<major>\d+)
-	\.(?P<minor>\d+)
-	\.(?P<patch>\d+)
-	((?P<prekind>a|b|rc)
-	(?P<pre>\d+)  # pre-release version num
+current_version = 1.5.0b1
+parse = (?P<major>[\d]+) # major version number
+	\.(?P<minor>[\d]+) # minor version number
+	\.(?P<patch>[\d]+) # patch version number
+	(?P<prerelease> # optional pre-release - ex: a1, b2, rc25
+	(?P<prekind>a|b|rc) # pre-release type
+	(?P<num>[\d]+) # pre-release version number
 	)?
+	( # optional nightly release indicator
+	\.(?P<nightly>dev[0-9]+) # ex: .dev02142023
+	)? # expected matches: `1.15.0`, `1.5.0a11`, `1.5.0a1.dev123`, `1.5.0.dev123457`, expected failures: `1`, `1.5`, `1.5.2-a1`, `text1.5.0`
 serialize =
-	{major}.{minor}.{patch}{prekind}{pre}
+	{major}.{minor}.{patch}{prekind}{num}.{nightly}
+	{major}.{minor}.{patch}.{nightly}
+	{major}.{minor}.{patch}{prekind}{num}
 	{major}.{minor}.{patch}
 commit = False
 tag = False

@@ -21,9 +27,11 @@ values =
 	rc
 	final
 
-[bumpversion:part:pre]
+[bumpversion:part:num]
 first_value = 1
 
+[bumpversion:part:nightly]
+
 [bumpversion:file:core/setup.py]
 
 [bumpversion:file:core/dbt/version.py]
```
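The comment on the new `parse` pattern enumerates its expected matches and failures. A minimal sketch (Python standard library only, not part of the PR) that replays those examples against the same pattern; `fullmatch` stands in here for the anchoring bumpversion applies, since `text1.5.0` contains a valid version as a substring:

```python
import re

# The multi-line parse pattern from .bumpversion.cfg, compiled with
# re.VERBOSE so the inline comments and whitespace are ignored.
PARSE = re.compile(
    r"""(?P<major>[\d]+)              # major version number
        \.(?P<minor>[\d]+)            # minor version number
        \.(?P<patch>[\d]+)            # patch version number
        (?P<prerelease>               # optional pre-release - ex: a1, b2, rc25
            (?P<prekind>a|b|rc)       # pre-release type
            (?P<num>[\d]+)            # pre-release version number
        )?
        (                             # optional nightly release indicator
            \.(?P<nightly>dev[0-9]+)  # ex: .dev02142023
        )?""",
    re.VERBOSE,
)

# Expected matches and failures, copied from the config comment.
for version in ("1.15.0", "1.5.0a11", "1.5.0a1.dev123", "1.5.0.dev123457"):
    assert PARSE.fullmatch(version), version
for version in ("1", "1.5", "1.5.2-a1", "text1.5.0"):
    assert PARSE.fullmatch(version) is None, version
```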
.changes/1.5.0-b1.md (new file, 80 lines)

```md
## dbt-core 1.5.0-b1 - February 17, 2023

### Features

- Data type constraints are now native to SQL table materializations. Enforce columns are specific data types and not null depending on database functionality. ([#6079](https://github.com/dbt-labs/dbt-core/issues/6079))
- Have dbt debug spit out structured json logs with flags enabled. ([#5353](https://github.com/dbt-labs/dbt-core/issues/5353))
- add adapter_response to dbt test and freshness result ([#2964](https://github.com/dbt-labs/dbt-core/issues/2964))
- Improve error message for packages missing `dbt_project.yml` ([#6663](https://github.com/dbt-labs/dbt-core/issues/6663))
- Adjust makefile to have clearer instructions for CI env var changes. ([#6689](https://github.com/dbt-labs/dbt-core/issues/6689))
- Stand-alone Python module for PostgresColumn ([#6772](https://github.com/dbt-labs/dbt-core/issues/6772))
- Exposure owner requires one of name or email keys, and accepts additional arbitrary keys ([#6833](https://github.com/dbt-labs/dbt-core/issues/6833))
- Parse 'group' resource ([#6921](https://github.com/dbt-labs/dbt-core/issues/6921))

### Fixes

- add merge_exclude_columns adapter tests ([#6699](https://github.com/dbt-labs/dbt-core/issues/6699))
- Include adapter_response in NodeFinished run_result log event ([#6703](https://github.com/dbt-labs/dbt-core/issues/6703))
- Sort cli vars before hashing for partial parsing ([#6710](https://github.com/dbt-labs/dbt-core/issues/6710))
- [Regression] exposure_content referenced incorrectly ([#6738](https://github.com/dbt-labs/dbt-core/issues/6738))
- Remove pin on packaging and stop using it for prerelease comparisons ([#6834](https://github.com/dbt-labs/dbt-core/issues/6834))
- Readd depends_on.macros to SeedNode, to support seeds with hooks calling macros ([#6806](https://github.com/dbt-labs/dbt-core/issues/6806))
- Fix regression of --quiet cli parameter behavior ([#6749](https://github.com/dbt-labs/dbt-core/issues/6749))
- Ensure results from hooks contain nodes when processing them ([#6796](https://github.com/dbt-labs/dbt-core/issues/6796))
- Always flush stdout after logging ([#6901](https://github.com/dbt-labs/dbt-core/issues/6901))
- Reapply logging fixes which were accidentally reverted ([#6936](https://github.com/dbt-labs/dbt-core/issues/6936))
- Set relation_name in test nodes at compile time ([#6930](https://github.com/dbt-labs/dbt-core/issues/6930))
- Readd initialization events, --log-cache-events in new CLI ([#6933](https://github.com/dbt-labs/dbt-core/issues/6933))
- Fix previous state tests and disabled exposures, metrics ([#6752](https://github.com/dbt-labs/dbt-core/issues/6752), [#6753](https://github.com/dbt-labs/dbt-core/issues/6753))
- Make use of hashlib.md5() FIPS compliant ([#6900](https://github.com/dbt-labs/dbt-core/issues/6900))

### Docs

- update link to installation instructions ([dbt-docs/#None](https://github.com/dbt-labs/dbt-docs/issues/None))
- Fix JSON path to overview docs ([dbt-docs/#366](https://github.com/dbt-labs/dbt-docs/issues/366))
- Searchable column descriptions ([dbt-docs/#140](https://github.com/dbt-labs/dbt-docs/issues/140), [dbt-docs/#322](https://github.com/dbt-labs/dbt-docs/issues/322), [dbt-docs/#369](https://github.com/dbt-labs/dbt-docs/issues/369))

### Under the Hood

- [CT-921] dbt compile works in click ([#5545](https://github.com/dbt-labs/dbt-core/issues/5545))
- Fix use of ConnectionReused logging event ([#6168](https://github.com/dbt-labs/dbt-core/issues/6168))
- Port docs tests to pytest ([#6573](https://github.com/dbt-labs/dbt-core/issues/6573))
- Update deprecated github action command ([#6153](https://github.com/dbt-labs/dbt-core/issues/6153))
- dbt snapshot works in click ([#5554](https://github.com/dbt-labs/dbt-core/issues/5554))
- dbt list working with click ([#5549](https://github.com/dbt-labs/dbt-core/issues/5549))
- Add dbt run-operation to click CLI ([#5552](https://github.com/dbt-labs/dbt-core/issues/5552))
- dbt build working with new click framework ([#5541](https://github.com/dbt-labs/dbt-core/issues/5541))
- dbt docs generate works with new click framework ([#5543](https://github.com/dbt-labs/dbt-core/issues/5543))
- Replaced the EmptyLine event with a more general Formatting event, and added a Note event. ([#6481](https://github.com/dbt-labs/dbt-core/issues/6481))
- Small optimization on manifest parsing benefitting large DAGs ([#6697](https://github.com/dbt-labs/dbt-core/issues/6697))
- Revised and simplified various structured logging events ([#6664](https://github.com/dbt-labs/dbt-core/issues/6664), [#6665](https://github.com/dbt-labs/dbt-core/issues/6665), [#6666](https://github.com/dbt-labs/dbt-core/issues/6666))
- dbt init works with click ([#5548](https://github.com/dbt-labs/dbt-core/issues/5548))
- [CT-920][CT-1900] Create Click CLI runner and use it to fix dbt docs commands ([#5544](https://github.com/dbt-labs/dbt-core/issues/5544), [#6722](https://github.com/dbt-labs/dbt-core/issues/6722))
- Migrate debug task to click ([#5546](https://github.com/dbt-labs/dbt-core/issues/5546))
- Optimized GraphQueue to remove graph analysis bottleneck in large dags. ([#6759](https://github.com/dbt-labs/dbt-core/issues/6759))
- Implement --version for click cli ([#6757](https://github.com/dbt-labs/dbt-core/issues/6757))
- [CT-1841] Convert custom target test to Pytest ([#6638](https://github.com/dbt-labs/dbt-core/issues/6638))
- Remove BigQuery-specific byte abbreviations ([#6741](https://github.com/dbt-labs/dbt-core/issues/6741))
- warn_error/warn_error_options mutual exclusivity in click ([#6579](https://github.com/dbt-labs/dbt-core/issues/6579))
- Enables the new Click Cli on the commandline! 🚀 ([#6784](https://github.com/dbt-labs/dbt-core/issues/6784))
- Lazily call --version ([#6812](https://github.com/dbt-labs/dbt-core/issues/6812))
- Moving simple_seed to adapter zone to help adapter test conversions ([#CT-1959](https://github.com/dbt-labs/dbt-core/issues/CT-1959))
- flags.THREADS defaults to None ([#6887](https://github.com/dbt-labs/dbt-core/issues/6887))
- Fixing target type exposure error ([#6928](https://github.com/dbt-labs/dbt-core/issues/6928))
- Test binary serialization of logging events ([#6852](https://github.com/dbt-labs/dbt-core/issues/6852))

### Dependencies

- Bump ubuntu from 22.04 to 23.04 ([#6865](https://github.com/dbt-labs/dbt-core/pull/6865))
- Revert hoisting dbt.cli.main into the dbt.name namespace ([#](https://github.com/dbt-labs/dbt-core/pull/))

### Contributors
- [@aezomz](https://github.com/aezomz) ([#2964](https://github.com/dbt-labs/dbt-core/issues/2964))
- [@boxysean](https://github.com/boxysean) ([#6697](https://github.com/dbt-labs/dbt-core/issues/6697))
- [@callum-mcdata](https://github.com/callum-mcdata) ([#6928](https://github.com/dbt-labs/dbt-core/issues/6928))
- [@dave-connors-3](https://github.com/dave-connors-3) ([#6699](https://github.com/dbt-labs/dbt-core/issues/6699))
- [@davidbloss](https://github.com/davidbloss) ([#6153](https://github.com/dbt-labs/dbt-core/issues/6153))
- [@halvorlu](https://github.com/halvorlu) ([#366](https://github.com/dbt-labs/dbt-core/issues/366))
- [@nielspardon](https://github.com/nielspardon) ([#6900](https://github.com/dbt-labs/dbt-core/issues/6900))
- [@ryancharris](https://github.com/ryancharris) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
- [@sungchun12](https://github.com/sungchun12) ([#6079](https://github.com/dbt-labs/dbt-core/issues/6079))
```
.changes/1.5.0/Dependencies-20230206-000926.yaml (new file, 6 lines)

```yaml
kind: "Dependencies"
body: "Bump ubuntu from 22.04 to 23.04"
time: 2023-02-06T00:09:26.00000Z
custom:
  Author: dependabot[bot]
  PR: 6865
```

.changes/1.5.0/Dependencies-20230215-091759.yaml (new file, 6 lines)

```yaml
kind: Dependencies
body: Revert hoisting dbt.cli.main into the dbt.name namespace
time: 2023-02-15T09:17:59.04148-08:00
custom:
  Author: aranke
  PR: ''
```

.changes/1.5.0/Docs-20230207-123807.yaml (new file, 6 lines)

```yaml
kind: Docs
body: update link to installation instructions
time: 2023-02-07T12:38:07.336783-05:00
custom:
  Author: ryancharris
  Issue: None
```

.changes/1.5.0/Docs-20230209-082901.yaml (new file, 6 lines)

```yaml
kind: Docs
body: Fix JSON path to overview docs
time: 2023-02-09T08:29:01.432616-07:00
custom:
  Author: halvorlu
  Issue: "366"
```

.changes/1.5.0/Docs-20230209-212729.yaml (new file, 6 lines)

```yaml
kind: Docs
body: Searchable column descriptions
time: 2023-02-09T21:27:29.570243-07:00
custom:
  Author: dbeatty10
  Issue: 140 322 369
```

.changes/1.5.0/Features-20221118-141120.yaml (new file, 8 lines)

```yaml
kind: Features
body: Data type constraints are now native to SQL table materializations. Enforce
  columns are specific data types and not null depending on database functionality.
time: 2022-11-18T14:11:20.868062-08:00
custom:
  Author: sungchun12
  Issue: "6079"
  PR: "6271"
```

.changes/1.5.0/Features-20230107-003157.yaml (new file, 6 lines)

```yaml
kind: Features
body: Have dbt debug spit out structured json logs with flags enabled.
time: 2023-01-07T00:31:57.516063-08:00
custom:
  Author: versusfacit
  Issue: "5353"
```

.changes/1.5.0/Features-20230118-233801.yaml (new file, 6 lines)

```yaml
kind: Features
body: add adapter_response to dbt test and freshness result
time: 2023-01-18T23:38:01.857342+08:00
custom:
  Author: aezomz
  Issue: "2964"
```

.changes/1.5.0/Features-20230120-112921.yaml (new file, 6 lines)

```yaml
kind: Features
body: Improve error message for packages missing `dbt_project.yml`
time: 2023-01-20T11:29:21.509967-07:00
custom:
  Author: dbeatty10
  Issue: "6663"
```

.changes/1.5.0/Features-20230126-154716.yaml (new file, 6 lines)

```yaml
kind: Features
body: Adjust makefile to have clearer instructions for CI env var changes.
time: 2023-01-26T15:47:16.887327-08:00
custom:
  Author: versusfacit
  Issue: "6689"
```

.changes/1.5.0/Features-20230127-162812.yaml (new file, 6 lines)

```yaml
kind: Features
body: Stand-alone Python module for PostgresColumn
time: 2023-01-27T16:28:12.212427-08:00
custom:
  Author: nssalian
  Issue: "6772"
```

.changes/1.5.0/Features-20230209-092059.yaml (new file, 7 lines)

```yaml
kind: Features
body: Exposure owner requires one of name or email keys, and accepts additional arbitrary
  keys
time: 2023-02-09T09:20:59.300272-05:00
custom:
  Author: michelleark
  Issue: "6833"
```

.changes/1.5.0/Features-20230209-093409.yaml (new file, 6 lines)

```yaml
kind: Features
body: Parse 'group' resource
time: 2023-02-09T09:34:09.547006-05:00
custom:
  Author: michelleark
  Issue: "6921"
```
.changes/1.5.0/Fixes-20230123-132814.yaml (new file, 6 lines)

```yaml
kind: Fixes
body: add merge_exclude_columns adapter tests
time: 2023-01-23T13:28:14.808748-06:00
custom:
  Author: dave-connors-3
  Issue: "6699"
```

.changes/1.5.0/Fixes-20230124-115837.yaml (new file, 6 lines)

```yaml
kind: Fixes
body: Include adapter_response in NodeFinished run_result log event
time: 2023-01-24T11:58:37.74179-05:00
custom:
  Author: gshank
  Issue: "6703"
```

.changes/1.5.0/Fixes-20230124-141943.yaml (new file, 6 lines)

```yaml
kind: Fixes
body: Sort cli vars before hashing for partial parsing
time: 2023-01-24T14:19:43.333628-05:00
custom:
  Author: gshank
  Issue: "6710"
```

.changes/1.5.0/Fixes-20230125-191739.yaml (new file, 6 lines)

```yaml
kind: Fixes
body: '[Regression] exposure_content referenced incorrectly'
time: 2023-01-25T19:17:39.942081-05:00
custom:
  Author: Mathyoub
  Issue: "6738"
```

.changes/1.5.0/Fixes-20230201-154418.yaml (new file, 6 lines)

```yaml
kind: Fixes
body: Remove pin on packaging and stop using it for prerelease comparisons
time: 2023-02-01T15:44:18.279158-05:00
custom:
  Author: gshank
  Issue: "6834"
```

.changes/1.5.0/Fixes-20230203-135557.yaml (new file, 6 lines)

```yaml
kind: Fixes
body: Readd depends_on.macros to SeedNode, to support seeds with hooks calling macros
time: 2023-02-03T13:55:57.853715+01:00
custom:
  Author: jtcohen6
  Issue: "6806"
```

.changes/1.5.0/Fixes-20230207-143544.yaml (new file, 6 lines)

```yaml
kind: Fixes
body: Fix regression of --quiet cli parameter behavior
time: 2023-02-07T14:35:44.160163-05:00
custom:
  Author: peterallenwebb
  Issue: "6749"
```

.changes/1.5.0/Fixes-20230208-110551.yaml (new file, 6 lines)

```yaml
kind: Fixes
body: Ensure results from hooks contain nodes when processing them
time: 2023-02-08T11:05:51.952494-06:00
custom:
  Author: emmyoop
  Issue: "6796"
```

.changes/1.5.0/Fixes-20230208-154935.yaml (new file, 6 lines)

```yaml
kind: Fixes
body: Always flush stdout after logging
time: 2023-02-08T15:49:35.175874-05:00
custom:
  Author: peterallenwebb
  Issue: "6901"
```

.changes/1.5.0/Fixes-20230210-103028.yaml (new file, 6 lines)

```yaml
kind: Fixes
body: Reapply logging fixes which were accidentally reverted
time: 2023-02-10T10:30:28.179997-05:00
custom:
  Author: peterallenwebb
  Issue: "6936"
```

.changes/1.5.0/Fixes-20230210-194157.yaml (new file, 6 lines)

```yaml
kind: Fixes
body: Set relation_name in test nodes at compile time
time: 2023-02-10T19:41:57.386766-05:00
custom:
  Author: gshank
  Issue: "6930"
```

.changes/1.5.0/Fixes-20230213-130522.yaml (new file, 6 lines)

```yaml
kind: Fixes
body: Readd initialization events, --log-cache-events in new CLI
time: 2023-02-13T13:05:22.989477+01:00
custom:
  Author: jtcohen6
  Issue: "6933"
```

.changes/1.5.0/Fixes-20230213-170723.yaml (new file, 6 lines)

```yaml
kind: Fixes
body: Fix previous state tests and disabled exposures, metrics
time: 2023-02-13T17:07:23.185679-05:00
custom:
  Author: gshank
  Issue: 6752 6753
```

.changes/1.5.0/Fixes-20230215-104536.yaml (new file, 6 lines)

```yaml
kind: Fixes
body: Make use of hashlib.md5() FIPS compliant
time: 2023-02-15T10:45:36.755797+01:00
custom:
  Author: nielspardon
  Issue: "6900"
```
.changes/1.5.0/Under the Hood-20230111-145143.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: '[CT-921] dbt compile works in click'
time: 2023-01-11T14:51:43.324107-08:00
custom:
  Author: aranke
  Issue: "5545"
```

.changes/1.5.0/Under the Hood-20230113-150700.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: Port docs tests to pytest
time: 2023-01-13T15:07:00.477038-05:00
custom:
  Author: peterallenwebb
  Issue: "6573"
```

.changes/1.5.0/Under the Hood-20230117-162505.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: dbt snapshot works in click
time: 2023-01-17T16:25:05.973769-08:00
custom:
  Author: ChenyuLInx
  Issue: "5554"
```

.changes/1.5.0/Under the Hood-20230117-213729.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: dbt list working with click
time: 2023-01-17T21:37:29.91632-05:00
custom:
  Author: michelleark
  Issue: "5549"
```

.changes/1.5.0/Under the Hood-20230119-105304.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: Add dbt run-operation to click CLI
time: 2023-01-19T10:53:04.154871+01:00
custom:
  Author: jtcohen6
  Issue: "5552"
```

.changes/1.5.0/Under the Hood-20230119-205650.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: dbt build working with new click framework
time: 2023-01-19T20:56:50.50549-05:00
custom:
  Author: michelleark
  Issue: "5541"
```

.changes/1.5.0/Under the Hood-20230119-211040.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: dbt docs generate works with new click framework
time: 2023-01-19T21:10:40.698851-05:00
custom:
  Author: michelleark
  Issue: "5543"
```

.changes/1.5.0/Under the Hood-20230120-172254.yaml (new file, 7 lines)

```yaml
kind: Under the Hood
body: Replaced the EmptyLine event with a more general Formatting event, and added
  a Note event.
time: 2023-01-20T17:22:54.45828-05:00
custom:
  Author: peterallenwebb
  Issue: "6481"
```

.changes/1.5.0/Under the Hood-20230122-215235.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: Small optimization on manifest parsing benefitting large DAGs
time: 2023-01-22T21:52:35.549814+01:00
custom:
  Author: boxysean
  Issue: "6697"
```

.changes/1.5.0/Under the Hood-20230124-153553.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: Revised and simplified various structured logging events
time: 2023-01-24T15:35:53.065356-05:00
custom:
  Author: peterallenwebb
  Issue: 6664 6665 6666
```

.changes/1.5.0/Under the Hood-20230124-175110.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: dbt init works with click
time: 2023-01-24T17:51:10.74065-05:00
custom:
  Author: michelleark
  Issue: "5548"
```

.changes/1.5.0/Under the Hood-20230125-041136.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: '[CT-920][CT-1900] Create Click CLI runner and use it to fix dbt docs commands'
time: 2023-01-25T04:11:36.57506-08:00
custom:
  Author: aranke
  Issue: 5544 6722
```

.changes/1.5.0/Under the Hood-20230125-102606.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: Migrate debug task to click
time: 2023-01-25T10:26:06.735994-06:00
custom:
  Author: stu-k
  Issue: "5546"
```

.changes/1.5.0/Under the Hood-20230126-135939.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: ' Optimized GraphQueue to remove graph analysis bottleneck in large dags.'
time: 2023-01-26T13:59:39.518345-05:00
custom:
  Author: peterallenwebb
  Issue: "6759"
```

.changes/1.5.0/Under the Hood-20230126-143102.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: Implement --version for click cli
time: 2023-01-26T14:31:02.740282-06:00
custom:
  Author: stu-k
  Issue: "6757"
```

.changes/1.5.0/Under the Hood-20230126-164741.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: '[CT-1841] Convert custom target test to Pytest'
time: 2023-01-26T16:47:41.198714-08:00
custom:
  Author: aranke
  Issue: "6638"
```

.changes/1.5.0/Under the Hood-20230130-153306.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: Remove BigQuery-specific byte abbreviations
time: 2023-01-30T15:33:06.28965-07:00
custom:
  Author: dbeatty10
  Issue: "6741"
```

.changes/1.5.0/Under the Hood-20230130-175752.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: "Enables the new Click Cli on the commandline! \U0001F680"
time: 2023-01-30T17:57:52.65626-06:00
custom:
  Author: iknox-fa
  Issue: "6784"
```

.changes/1.5.0/Under the Hood-20230130-180917.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: warn_error/warn_error_options mutual exclusivity in click
time: 2023-01-30T18:09:17.240662-05:00
custom:
  Author: michelleark
  Issue: "6579"
```

.changes/1.5.0/Under the Hood-20230131-141806.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: Lazily call --version
time: 2023-01-31T14:18:06.02312-06:00
custom:
  Author: stu-k
  Issue: "6812"
```

.changes/1.5.0/Under the Hood-20230203-143551.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: Moving simple_seed to adapter zone to help adapter test conversions
time: 2023-02-03T14:35:51.481856-08:00
custom:
  Author: nssalian
  Issue: CT-1959
```

.changes/1.5.0/Under the Hood-20230207-165111.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: flags.THREADS defaults to None
time: 2023-02-07T16:51:11.011984-05:00
custom:
  Author: michelleark
  Issue: "6887"
```

.changes/1.5.0/Under the Hood-20230210-084647.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: Fixing target type exposure error
time: 2023-02-10T08:46:47.72936-06:00
custom:
  Author: callum-mcdata
  Issue: "6928"
```

.changes/1.5.0/Under the Hood-20230216-143252.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: Test binary serialization of logging events
time: 2023-02-16T14:32:52.524225-05:00
custom:
  Author: gshank
  Issue: "6852"
```
.changes/unreleased/Dependency-20220927-000822.yaml (new file, 7 lines)

```yaml
kind: "Dependency"
body: "Bump mypy from 0.971 to 0.981"
time: 2022-09-27T00:08:22.00000Z
custom:
  Author: dependabot[bot]
  Issue: 4904
  PR: 5937
```

.changes/unreleased/Docs-20230113-094855.yaml (new file, 7 lines)

```yaml
kind: Docs
body: Improve displayed message under "Arguments" section for argumentless macro
time: 2023-01-13T09:48:55.574898-05:00
custom:
  Author: MartinGuindon
  Issue: "358"
  PR: "359"
```

.changes/unreleased/Features-20230124-135550.yaml (new file, 6 lines)

```yaml
kind: Features
body: Make project version optional
time: 2023-01-24T13:55:50.86071024-08:00
custom:
  Author: seub
  Issue: "6603"
```

.changes/unreleased/Features-20230214-225134.yaml (new file, 6 lines)

```yaml
kind: Features
body: parse 'group' config on groupable nodes
time: 2023-02-14T22:51:34.936228-05:00
custom:
  Author: michelleark
  Issue: "6823"
```

.changes/unreleased/Features-20230216-144534.yaml (new file, 6 lines)

```yaml
kind: Features
body: Implemented new log cli parameters for finer-grained control.
time: 2023-02-16T14:45:34.038453-05:00
custom:
  Author: peterallenwebb
  Issue: "6639"
```

.changes/unreleased/Features-20230218-092816.yaml (new file, 6 lines)

```yaml
kind: Features
body: Add access attribute to parsed nodes
time: 2023-02-18T09:28:16.448175-05:00
custom:
  Author: gshank
  Issue: "6824"
```

.changes/unreleased/Fixes-20221030-102114.yaml (new file, 7 lines)

```yaml
kind: Fixes
body: Remove trailing slashes from source paths (#6102)
time: 2022-10-30T10:21:14.660221Z
custom:
  Author: jmg-duarte
  Issue: "6102"
  PR: "6179"
```

.changes/unreleased/Fixes-20230221-170630.yaml (new file, 6 lines)

```yaml
kind: Fixes
body: Fix compilation logic for ephemeral nodes
time: 2023-02-21T17:06:30.218568-05:00
custom:
  Author: gshank
  Issue: "6885"
```

.changes/unreleased/Fixes-20230224-001338.yaml (new file, 6 lines)

```yaml
kind: Fixes
body: Fix semver comparison logic by ensuring numeric values
time: 2023-02-24T00:13:38.23242+01:00
custom:
  Author: jtcohen6
  Issue: "7039"
```

.changes/unreleased/Under the Hood-20230217-105223.yaml (new file, 6 lines)

```yaml
kind: Under the Hood
body: Rename "constraint_enabled" to "contract"
time: 2023-02-17T10:52:23.212474-05:00
custom:
  Author: gshank
  Issue: "6748"
```
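Every entry above shares the same shape, which the `.changie.yaml` templates below consume: a `kind`, a one-line (or folded) `body`, a timestamp, and a `custom` map whose `Author` and `Issue` values may hold several space-separated items. A minimal reading sketch, assuming PyYAML is available (changie itself is a Go tool; this is only an illustration):

```python
import yaml  # assumption: PyYAML is installed

# Load one changie entry like those above and inspect its fields.
with open(".changes/unreleased/Fixes-20230221-170630.yaml") as f:
    entry = yaml.safe_load(f)

print(entry["kind"])                          # "Fixes"
print(entry["body"])                          # "Fix compilation logic for ephemeral nodes"
print(entry["custom"]["Author"])              # may be "authorA authorB"
print(str(entry["custom"]["Issue"]).split())  # e.g. ["6752", "6753"] for multi-issue entries
```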
.changie.yaml

```diff
@@ -4,6 +4,7 @@ headerPath: header.tpl.md
 versionHeaderPath: ""
 changelogPath: CHANGELOG.md
 versionExt: md
+envPrefix: "CHANGIE_"
 versionFormat: '## dbt-core {{.Version}} - {{.Time.Format "January 02, 2006"}}'
 kindFormat: '### {{.Kind}}'
 changeFormat: |-

@@ -87,32 +88,44 @@ custom:
 
 footerFormat: |
   {{- $contributorDict := dict }}
-  {{- /* any names added to this list should be all lowercase for later matching purposes */}}
-  {{- $core_team := list "michelleark" "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "aranke" "dependabot[bot]" "snyk-bot" "colin-rogers-dbt" }}
+  {{- /* ensure all names in this list are all lowercase for later matching purposes */}}
+  {{- $core_team := splitList " " .Env.CORE_TEAM }}
+  {{- /* ensure we always skip snyk and dependabot in addition to the core team */}}
+  {{- $maintainers := list "dependabot[bot]" "snyk-bot"}}
+  {{- range $team_member := $core_team }}
+    {{- $team_member_lower := lower $team_member }}
+    {{- $maintainers = append $maintainers $team_member_lower }}
+  {{- end }}
   {{- range $change := .Changes }}
     {{- $authorList := splitList " " $change.Custom.Author }}
     {{- /* loop through all authors for a single changelog */}}
     {{- range $author := $authorList }}
       {{- $authorLower := lower $author }}
       {{- /* we only want to include non-core team contributors */}}
-      {{- if not (has $authorLower $core_team)}}
+      {{- if not (has $authorLower $maintainers)}}
         {{- $changeList := splitList " " $change.Custom.Author }}
         {{- /* Docs kind link back to dbt-docs instead of dbt-core issues */}}
+        {{- $IssueList := list }}
         {{- $changeLink := $change.Kind }}
         {{- if or (eq $change.Kind "Dependencies") (eq $change.Kind "Security") }}
-          {{- $changeLink = "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $change.Custom.PR }}
+          {{- $changes := splitList " " $change.Custom.PR }}
+          {{- range $issueNbr := $changes }}
+            {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $issueNbr }}
+            {{- $IssueList = append $IssueList $changeLink }}
+          {{- end -}}
         {{- else if eq $change.Kind "Docs"}}
           {{- $changeLink = "[dbt-docs/#nbr](https://github.com/dbt-labs/dbt-docs/issues/nbr)" | replace "nbr" $change.Custom.Issue }}
         {{- else }}
-          {{- $changeLink = "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $change.Custom.Issue }}
+          {{- $changes := splitList " " $change.Custom.Issue }}
+          {{- range $issueNbr := $changes }}
+            {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
+            {{- $IssueList = append $IssueList $changeLink }}
+          {{- end -}}
         {{- end }}
         {{- /* check if this contributor has other changes associated with them already */}}
         {{- if hasKey $contributorDict $author }}
           {{- $contributionList := get $contributorDict $author }}
-          {{- $contributionList = append $contributionList $changeLink }}
+          {{- $contributionList = concat $contributionList $IssueList }}
           {{- $contributorDict := set $contributorDict $author $contributionList }}
         {{- else }}
-          {{- $contributionList := list $changeLink }}
+          {{- $contributionList := $IssueList }}
          {{- $contributorDict := set $contributorDict $author $contributionList }}
         {{- end }}
       {{- end}}
 
```
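In prose: the template now lowercases the core-team handles taken from the `CORE_TEAM` environment variable, always skips dependabot and snyk, and collects one issue or PR link per space-separated number for every remaining author. A rough Python rendering of that aggregation (the team list and change entries here are hypothetical; the real logic is the Go template changie executes):

```python
CORE_TEAM = ["michelleark", "peterallenwebb", "gshank"]  # illustrative subset

# Maintainers to skip: snyk/dependabot plus the lowercased core team,
# mirroring the $maintainers list built at the top of the template.
maintainers = {"dependabot[bot]", "snyk-bot"} | {m.lower() for m in CORE_TEAM}

changes = [  # hypothetical parsed changie entries
    {"Kind": "Fixes", "Author": "gshank externalperson", "Issue": "6752 6753", "PR": ""},
    {"Kind": "Docs", "Author": "dbeatty10", "Issue": "140 322 369", "PR": ""},
]

contributor_dict: dict[str, list[str]] = {}
for change in changes:
    for author in change["Author"].split(" "):
        if author.lower() in maintainers:
            continue  # only non-core contributors get a shout-out
        if change["Kind"] in ("Dependencies", "Security"):
            template = "[#{n}](https://github.com/dbt-labs/dbt-core/pull/{n})"
            numbers = change["PR"].split(" ")
        elif change["Kind"] == "Docs":
            template = "[dbt-docs/#{n}](https://github.com/dbt-labs/dbt-docs/issues/{n})"
            numbers = change["Issue"].split(" ")
        else:
            template = "[#{n}](https://github.com/dbt-labs/dbt-core/issues/{n})"
            numbers = change["Issue"].split(" ")
        links = [template.format(n=n) for n in numbers]
        contributor_dict.setdefault(author, []).extend(links)
```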
.github/workflows/cut-release-branch.yml (new file, 41 lines)

```yaml
# **what?**
# Cuts a new `*.latest` branch.
# Also cleans up all files in `.changes/unreleased` and `.changes/previous version`
# on `main` and bumps `main` to the input version.

# **why?**
# Generally reduces the workload of engineers and reduces error. Allows automation.

# **when?**
# This will run when called manually.

name: Cut new release branch

on:
  workflow_dispatch:
    inputs:
      version_to_bump_main:
        description: 'The alpha version main should bump to (ex. 1.6.0a1)'
        required: true
      new_branch_name:
        description: 'The full name of the new branch (ex. 1.5.latest)'
        required: true

defaults:
  run:
    shell: bash

permissions:
  contents: write

jobs:
  cut_branch:
    name: "Cut branch and clean up main for dbt-core"
    uses: dbt-labs/actions/.github/workflows/cut-release-branch.yml@main
    with:
      version_to_bump_main: ${{ inputs.version_to_bump_main }}
      new_branch_name: ${{ inputs.new_branch_name }}
      PR_title: "Cleanup main after cutting new ${{ inputs.new_branch_name }} branch"
      PR_body: "All adapter PRs will fail CI until the dbt-core PR has been merged due to release version conflicts."
    secrets:
      FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }}
```
.github/workflows/nightly-release.yml (new file, 109 lines)

```yaml
# **what?**
# Nightly releases to GitHub and PyPI. This workflow produces the following outcome:
# - generate and validate data for the nightly release (commit SHA, version number, release branch);
# - pass data to the release workflow;
# - the nightly release will be pushed to GitHub as a draft release;
# - the nightly build will be pushed to test PyPI;
#
# **why?**
# Ensure an automated and tested release process for nightly builds
#
# **when?**
# This workflow runs on schedule or can be run manually on demand.

name: Nightly Test Release to GitHub and PyPI

on:
  workflow_dispatch: # for manual triggering
  schedule:
    - cron: 0 9 * * *

permissions:
  contents: write # this is the permission that allows creating a new release

defaults:
  run:
    shell: bash

env:
  RELEASE_BRANCH: "main"

jobs:
  aggregate-release-data:
    runs-on: ubuntu-latest

    outputs:
      commit_sha: ${{ steps.resolve-commit-sha.outputs.release_commit }}
      version_number: ${{ steps.nightly-release-version.outputs.number }}
      release_branch: ${{ steps.release-branch.outputs.name }}

    steps:
      - name: "Checkout ${{ github.repository }} Branch ${{ env.RELEASE_BRANCH }}"
        uses: actions/checkout@v3
        with:
          ref: ${{ env.RELEASE_BRANCH }}

      - name: "Resolve Commit To Release"
        id: resolve-commit-sha
        run: |
          commit_sha=$(git rev-parse HEAD)
          echo "release_commit=$commit_sha" >> $GITHUB_OUTPUT

      - name: "Get Current Version Number"
        id: version-number-sources
        run: |
          current_version=`awk -F"current_version = " '{print $2}' .bumpversion.cfg | tr '\n' ' '`
          echo "current_version=$current_version" >> $GITHUB_OUTPUT

      - name: "Audit Version And Parse Into Parts"
        id: semver
        uses: dbt-labs/actions/parse-semver@v1.1.0
        with:
          version: ${{ steps.version-number-sources.outputs.current_version }}

      - name: "Get Current Date"
        id: current-date
        run: echo "date=$(date +'%m%d%Y')" >> $GITHUB_OUTPUT

      - name: "Generate Nightly Release Version Number"
        id: nightly-release-version
        run: |
          number="${{ steps.semver.outputs.version }}.dev${{ steps.current-date.outputs.date }}"
          echo "number=$number" >> $GITHUB_OUTPUT

      - name: "Audit Nightly Release Version And Parse Into Parts"
        uses: dbt-labs/actions/parse-semver@v1.1.0
        with:
          version: ${{ steps.nightly-release-version.outputs.number }}

      - name: "Set Release Branch"
        id: release-branch
        run: |
          echo "name=${{ env.RELEASE_BRANCH }}" >> $GITHUB_OUTPUT

  log-outputs-aggregate-release-data:
    runs-on: ubuntu-latest
    needs: [aggregate-release-data]

    steps:
      - name: "[DEBUG] Log Outputs"
        run: |
          echo commit_sha    : ${{ needs.aggregate-release-data.outputs.commit_sha }}
          echo version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
          echo release_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}

  release-github-pypi:
    needs: [aggregate-release-data]

    uses: ./.github/workflows/release.yml
    with:
      sha: ${{ needs.aggregate-release-data.outputs.commit_sha }}
      target_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}
      version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
      build_script_path: "scripts/build-dist.sh"
      env_setup_script_path: "scripts/env-setup.sh"
      s3_bucket_name: "core-team-artifacts"
      package_test_command: "dbt --version"
      test_run: true
      nightly_release: true
    secrets: inherit
```
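Stringing the steps together, the nightly version is simply the current `.bumpversion.cfg` version with a `.dev<MMDDYYYY>` suffix. A small Python sketch of the same computation (illustrative; the workflow does this with awk and date):

```python
from datetime import datetime, timezone

def nightly_version(cfg_path: str = ".bumpversion.cfg") -> str:
    # "Get Current Version Number": read current_version from the cfg.
    current_version = None
    with open(cfg_path) as f:
        for line in f:
            if line.startswith("current_version"):
                current_version = line.split("=", 1)[1].strip()  # e.g. "1.5.0b1"
                break
    if current_version is None:
        raise ValueError("current_version not found in .bumpversion.cfg")
    # "Generate Nightly Release Version Number": date +'%m%d%Y' -> MMDDYYYY.
    stamp = datetime.now(timezone.utc).strftime("%m%d%Y")
    return f"{current_version}.dev{stamp}"

# e.g. "1.5.0b1.dev02142023", matching the `.dev02142023` example
# in the .bumpversion.cfg comment above.
```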
.github/workflows/release-branch-tests.yml (30 lines changed)

```diff
@@ -28,7 +28,33 @@ on:
 permissions: read-all
 
 jobs:
+  fetch-latest-branches:
+    runs-on: ubuntu-latest
+
+    outputs:
+      latest-branches: ${{ steps.get-latest-branches.outputs.repo-branches }}
+
+    steps:
+      - name: "Fetch dbt-core Latest Branches"
+        uses: dbt-labs/actions/fetch-repo-branches@v1.1.1
+        id: get-latest-branches
+        with:
+          repo_name: ${{ github.event.repository.name }}
+          organization: "dbt-labs"
+          pat: ${{ secrets.GITHUB_TOKEN }}
+          fetch_protected_branches_only: true
+          regex: "^1.[0-9]+.latest$"
+          perform_match_method: "match"
+          retries: 3
+
+      - name: "[ANNOTATION] ${{ github.event.repository.name }} - branches to test"
+        run: |
+          title="${{ github.event.repository.name }} - branches to test"
+          message="The workflow will run tests for the following branches of the ${{ github.event.repository.name }} repo: ${{ steps.get-latest-branches.outputs.repo-branches }}"
+          echo "::notice $title::$message"
+
   kick-off-ci:
+    needs: [fetch-latest-branches]
     name: Kick-off CI
     runs-on: ubuntu-latest
 
@@ -39,7 +65,9 @@ jobs:
       max-parallel: 1
       fail-fast: false
       matrix:
-        branch: [1.0.latest, 1.1.latest, 1.2.latest, 1.3.latest, main]
+        branch: ${{ fromJSON(needs.fetch-latest-branches.outputs.latest-branches) }}
+        include:
+          - branch: 'main'
 
     steps:
       - name: Call CI workflow for ${{ matrix.branch }} branch
```
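Note that the dots in the `regex` input above are unescaped, so they match any character and the filter is slightly looser than it reads. A quick sketch:

```python
import re

# The fetch-repo-branches filter keeps protected branches matching
# this pattern (unescaped dots match any character).
pattern = re.compile(r"^1.[0-9]+.latest$")

assert pattern.match("1.4.latest")
assert pattern.match("1.10.latest")
assert pattern.match("1x4xlatest")  # also matches, due to the unescaped dots
assert not pattern.match("main")
assert not pattern.match("2.0.latest")
```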
.github/workflows/release.yml (339 lines changed)

```diff
@@ -1,24 +1,110 @@
 # **what?**
-# Take the given commit, run unit tests specifically on that sha, build and
-# package it, and then release to GitHub and PyPi with that specific build
-
+# Release workflow provides the following steps:
+# - checkout the given commit;
+# - validate version in sources and changelog file for given version;
+# - bump the version and generate a changelog if needed;
+# - merge all changes to the target branch if needed;
+# - run unit and integration tests against given commit;
+# - build and package that SHA;
+# - release it to GitHub and PyPI with that specific build;
+#
 # **why?**
 # Ensure an automated and tested release process
-
+#
 # **when?**
-# This will only run manually with a given sha and version
+# This workflow can be run manually on demand or can be called by other workflows
 
-name: Release to GitHub and PyPi
+name: Release to GitHub and PyPI
 
 on:
   workflow_dispatch:
     inputs:
       sha:
-        description: 'The last commit sha in the release'
-        required: true
+        description: "The last commit sha in the release"
+        type: string
+        required: true
+      target_branch:
+        description: "The branch to release from"
+        type: string
+        required: true
       version_number:
-        description: 'The release version number (i.e. 1.0.0b1)'
-        required: true
+        description: "The release version number (i.e. 1.0.0b1)"
+        type: string
+        required: true
+      build_script_path:
+        description: "Build script path"
+        type: string
+        default: "scripts/build-dist.sh"
+        required: true
+      env_setup_script_path:
+        description: "Environment setup script path"
+        type: string
+        default: "scripts/env-setup.sh"
+        required: false
+      s3_bucket_name:
+        description: "AWS S3 bucket name"
+        type: string
+        default: "core-team-artifacts"
+        required: true
+      package_test_command:
+        description: "Package test command"
+        type: string
+        default: "dbt --version"
+        required: true
+      test_run:
+        description: "Test run (Publish release as draft)"
+        type: boolean
+        default: true
+        required: false
+      nightly_release:
+        description: "Nightly release to dev environment"
+        type: boolean
+        default: false
+        required: false
+  workflow_call:
+    inputs:
+      sha:
+        description: "The last commit sha in the release"
+        type: string
+        required: true
+      target_branch:
+        description: "The branch to release from"
+        type: string
+        required: true
+      version_number:
+        description: "The release version number (i.e. 1.0.0b1)"
+        type: string
+        required: true
+      build_script_path:
+        description: "Build script path"
+        type: string
+        default: "scripts/build-dist.sh"
+        required: true
+      env_setup_script_path:
+        description: "Environment setup script path"
+        type: string
+        default: "scripts/env-setup.sh"
+        required: false
+      s3_bucket_name:
+        description: "AWS S3 bucket name"
+        type: string
+        default: "core-team-artifacts"
+        required: true
+      package_test_command:
+        description: "Package test command"
+        type: string
+        default: "dbt --version"
+        required: true
+      test_run:
+        description: "Test run (Publish release as draft)"
+        type: boolean
+        default: true
+        required: false
+      nightly_release:
+        description: "Nightly release to dev environment"
+        type: boolean
+        default: false
+        required: false
 
 permissions:
   contents: write # this is the permission that allows creating a new release

@@ -28,175 +114,116 @@ defaults:
     shell: bash
 
 jobs:
-  unit:
-    name: Unit test
+  log-inputs:
+    name: Log Inputs
     runs-on: ubuntu-latest
-
-    env:
-      TOXENV: "unit"
-
     steps:
-      - name: Check out the repository
-        uses: actions/checkout@v2
-        with:
-          persist-credentials: false
-          ref: ${{ github.event.inputs.sha }}
-
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: 3.8
-
-      - name: Install python dependencies
+      - name: "[DEBUG] Print Variables"
         run: |
-          pip install --user --upgrade pip
-          pip install tox
-          pip --version
-          tox --version
-
-      - name: Run tox
-        run: tox
+          echo The last commit sha in the release: ${{ inputs.sha }}
+          echo The branch to release from: ${{ inputs.target_branch }}
+          echo The release version number: ${{ inputs.version_number }}
+          echo Build script path: ${{ inputs.build_script_path }}
+          echo Environment setup script path: ${{ inputs.env_setup_script_path }}
+          echo AWS S3 bucket name: ${{ inputs.s3_bucket_name }}
+          echo Package test command: ${{ inputs.package_test_command }}
+          echo Test run: ${{ inputs.test_run }}
+          echo Nightly release: ${{ inputs.nightly_release }}
 
-  build:
-    name: build packages
+  bump-version-generate-changelog:
+    name: Bump package version, Generate changelog
+    uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@main
+    with:
+      sha: ${{ inputs.sha }}
+      version_number: ${{ inputs.version_number }}
+      target_branch: ${{ inputs.target_branch }}
+      env_setup_script_path: ${{ inputs.env_setup_script_path }}
+      test_run: ${{ inputs.test_run }}
+      nightly_release: ${{ inputs.nightly_release }}
+    secrets: inherit
 
+  log-outputs-bump-version-generate-changelog:
+    name: "[Log output] Bump package version, Generate changelog"
+    if: ${{ !failure() && !cancelled() }}
+    needs: [bump-version-generate-changelog]
     runs-on: ubuntu-latest
-
     steps:
-      - name: Check out the repository
-        uses: actions/checkout@v2
-        with:
-          persist-credentials: false
-          ref: ${{ github.event.inputs.sha }}
-
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: 3.8
-
-      - name: Install python dependencies
+      - name: Print variables
         run: |
-          pip install --user --upgrade pip
-          pip install --upgrade setuptools wheel twine check-wheel-contents
-          pip --version
+          echo Final SHA     : ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
+          echo Changelog path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
 
-      - name: Build distributions
-        run: ./scripts/build-dist.sh
+  build-test-package:
+    name: Build, Test, Package
+    if: ${{ !failure() && !cancelled() }}
+    needs: [bump-version-generate-changelog]
+    uses: dbt-labs/dbt-release/.github/workflows/build.yml@main
+    with:
+      sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
+      version_number: ${{ inputs.version_number }}
+      changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
+      build_script_path: ${{ inputs.build_script_path }}
+      s3_bucket_name: ${{ inputs.s3_bucket_name }}
+      package_test_command: ${{ inputs.package_test_command }}
+      test_run: ${{ inputs.test_run }}
+      nightly_release: ${{ inputs.nightly_release }}
+    secrets:
+      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
 
-      - name: Show distributions
-        run: ls -lh dist/
-
-      - name: Check distribution descriptions
-        run: |
-          twine check dist/*
-
-      - name: Check wheel contents
-        run: |
-          check-wheel-contents dist/*.whl --ignore W007,W008
-
-      - uses: actions/upload-artifact@v2
-        with:
-          name: dist
-          path: |
-            dist/
-            !dist/dbt-${{github.event.inputs.version_number}}.tar.gz
-
-  test-build:
-    name: verify packages
-    needs: [build, unit]
-    runs-on: ubuntu-latest
-    steps:
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: 3.8
-
-      - name: Install python dependencies
-        run: |
-          pip install --user --upgrade pip
-          pip install --upgrade wheel
-          pip --version
-
-      - uses: actions/download-artifact@v2
-        with:
-          name: dist
-          path: dist/
-
-      - name: Show distributions
-        run: ls -lh dist/
-
-      - name: Install wheel distributions
-        run: |
-          find ./dist/*.whl -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
-
-      - name: Check wheel distributions
-        run: |
-          dbt --version
-
-      - name: Install source distributions
-        run: |
-          find ./dist/*.gz -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
-
-      - name: Check source distributions
-        run: |
-          dbt --version
-
   github-release:
     name: GitHub Release
-    needs: test-build
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/download-artifact@v2
-        with:
-          name: dist
-          path: '.'
-
-      # Need to set an output variable because env variables can't be taken as input
-      # This is needed for the next step with releasing to GitHub
-      - name: Find release type
-        id: release_type
-        env:
-          IS_PRERELEASE: ${{ contains(github.event.inputs.version_number, 'rc') || contains(github.event.inputs.version_number, 'b') }}
-        run: |
-          echo "isPrerelease=$IS_PRERELEASE" >> $GITHUB_OUTPUT
-
-      - name: Creating GitHub Release
-        uses: softprops/action-gh-release@v1
-        with:
-          name: dbt-core v${{github.event.inputs.version_number}}
-          tag_name: v${{github.event.inputs.version_number}}
-          prerelease: ${{ steps.release_type.outputs.isPrerelease }}
-          target_commitish: ${{github.event.inputs.sha}}
-          body: |
-            [Release notes](https://github.com/dbt-labs/dbt-core/blob/main/CHANGELOG.md)
-          files: |
-            dbt_postgres-${{github.event.inputs.version_number}}-py3-none-any.whl
-            dbt_core-${{github.event.inputs.version_number}}-py3-none-any.whl
-            dbt-postgres-${{github.event.inputs.version_number}}.tar.gz
-            dbt-core-${{github.event.inputs.version_number}}.tar.gz
+    if: ${{ !failure() && !cancelled() }}
+    needs: [bump-version-generate-changelog, build-test-package]
+    uses: dbt-labs/dbt-release/.github/workflows/github-release.yml@main
+    with:
+      sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
+      version_number: ${{ inputs.version_number }}
+      changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
+      test_run: ${{ inputs.test_run }}
 
   pypi-release:
-    name: Pypi release
-    runs-on: ubuntu-latest
-    needs: github-release
-    environment: PypiProd
-    steps:
-      - uses: actions/download-artifact@v2
-        with:
-          name: dist
-          path: 'dist'
-
-      - name: Publish distribution to PyPI
-        uses: pypa/gh-action-pypi-publish@v1.4.2
-        with:
-          password: ${{ secrets.PYPI_API_TOKEN }}
+    name: PyPI Release
+    needs: [github-release]
+    uses: dbt-labs/dbt-release/.github/workflows/pypi-release.yml@main
+    with:
+      version_number: ${{ inputs.version_number }}
+      test_run: ${{ inputs.test_run }}
+    secrets:
+      PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
+      TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }}
+
+  slack-notification:
+    name: Slack Notification
+    if: ${{ failure() && (!inputs.test_run || inputs.nightly_release) }}
+    needs:
+      [
+        bump-version-generate-changelog,
+        build-test-package,
+        github-release,
+        pypi-release,
+      ]
+    uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main
+    with:
+      status: "failure"
+    secrets:
+      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}
```
@@ -30,6 +30,8 @@ jobs:
|
||||
LOG_DIR: "/home/runner/work/dbt-core/dbt-core/logs"
|
||||
# tells integration tests to output into json format
|
||||
DBT_LOG_FORMAT: "json"
|
||||
# tell eventmgr to convert logging events into bytes
|
||||
DBT_TEST_BINARY_SERIALIZATION: "true"
|
||||
# Additional test users
|
||||
DBT_TEST_USER_1: dbt_test_user_1
|
||||
DBT_TEST_USER_2: dbt_test_user_2
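
Since DBT_LOG_FORMAT=json makes each log line a JSON object, a consumer of these logs can be sketched roughly as follows (the nested info/msg field layout is an assumption about the event schema, not a verified contract):

    import json

    # Hedged sketch: iterate dbt's JSON-formatted log lines.
    with open("logs/dbt.log") as log_file:
        for line in log_file:
            event = json.loads(line)
            print(event.get("info", {}).get("msg", ""))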

.github/workflows/version-bump.yml

@@ -20,106 +20,9 @@ on:
        description: 'The version number to bump to (ex. 1.2.0, 1.3.0b1)'
        required: true

permissions:
  contents: write
  pull-requests: write

jobs:
  bump:
    runs-on: ubuntu-latest
    steps:
      - name: "[DEBUG] Print Variables"
        run: |
          echo "all variables defined as inputs"
          echo The version_number: ${{ github.event.inputs.version_number }}

      - name: Check out the repository
        uses: actions/checkout@v2

      - uses: actions/setup-python@v2
        with:
          python-version: "3.8"

      - name: Install python dependencies
        run: |
          python3 -m venv env
          source env/bin/activate
          pip install --upgrade pip

      - name: Add Homebrew to PATH
        run: |
          echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH

      - name: Install Homebrew packages
        run: |
          brew install pre-commit
          brew tap miniscruff/changie https://github.com/miniscruff/changie
          brew install changie

      - name: Audit Version and Parse Into Parts
        id: semver
        uses: dbt-labs/actions/parse-semver@v1
        with:
          version: ${{ github.event.inputs.version_number }}

      - name: Set branch value
        id: variables
        run: |
          echo "BRANCH_NAME=prep-release/${{ github.event.inputs.version_number }}_$GITHUB_RUN_ID" >> $GITHUB_OUTPUT

      - name: Create PR branch
        run: |
          git checkout -b ${{ steps.variables.outputs.BRANCH_NAME }}
          git push origin ${{ steps.variables.outputs.BRANCH_NAME }}
          git branch --set-upstream-to=origin/${{ steps.variables.outputs.BRANCH_NAME }} ${{ steps.variables.outputs.BRANCH_NAME }}

      - name: Bump version
        run: |
          source env/bin/activate
          pip install -r dev-requirements.txt
          env/bin/bumpversion --allow-dirty --new-version ${{ github.event.inputs.version_number }} major
          git status

      - name: Run changie
        run: |
          if [[ ${{ steps.semver.outputs.is-pre-release }} -eq 1 ]]
          then
            changie batch ${{ steps.semver.outputs.base-version }} --move-dir '${{ steps.semver.outputs.base-version }}' --prerelease '${{ steps.semver.outputs.pre-release }}'
          else
            changie batch ${{ steps.semver.outputs.base-version }} --include '${{ steps.semver.outputs.base-version }}' --remove-prereleases
          fi
          changie merge
          git status

      # this step will fail on whitespace errors but also correct them
      - name: Remove trailing whitespace
        continue-on-error: true
        run: |
          pre-commit run trailing-whitespace --files .bumpversion.cfg CHANGELOG.md .changes/*
          git status

      # this step will fail on newline errors but also correct them
      - name: Removing extra newlines
        continue-on-error: true
        run: |
          pre-commit run end-of-file-fixer --files .bumpversion.cfg CHANGELOG.md .changes/*
          git status

      - name: Commit version bump to branch
        uses: EndBug/add-and-commit@v7
        with:
          author_name: 'Github Build Bot'
          author_email: 'buildbot@fishtownanalytics.com'
          message: 'Bumping version to ${{ github.event.inputs.version_number }} and generate CHANGELOG'
          branch: '${{ steps.variables.outputs.BRANCH_NAME }}'
          push: 'origin origin/${{ steps.variables.outputs.BRANCH_NAME }}'

      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v3
        with:
          author: 'Github Build Bot <buildbot@fishtownanalytics.com>'
          base: ${{github.ref}}
          title: 'Bumping version to ${{ github.event.inputs.version_number }} and generate changelog'
          branch: '${{ steps.variables.outputs.BRANCH_NAME }}'
          labels: |
            Skip Changelog
  version_bump_and_changie:
    uses: dbt-labs/actions/.github/workflows/version-bump.yml@main
    with:
      version_number: ${{ inputs.version_number }}
    secrets: inherit # ok since what we are calling is internally maintained

.gitignore

@@ -9,6 +9,7 @@ __pycache__/
# Distribution / packaging
.Python
env*/
.mf_dbt_venv
dbt_env/
build/
!core/dbt/docs/build
@@ -51,6 +52,7 @@ coverage.xml
*,cover
.hypothesis/
test.env
makefile.test.env
*.pytest_cache/


CHANGELOG.md

@@ -5,6 +5,88 @@
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)

## dbt-core 1.5.0-b1 - February 17, 2023

### Features

- Data type constraints are now native to SQL table materializations. Enforce columns are specific data types and not null depending on database functionality. ([#6079](https://github.com/dbt-labs/dbt-core/issues/6079))
- Have dbt debug spit out structured json logs with flags enabled. ([#5353](https://github.com/dbt-labs/dbt-core/issues/5353))
- add adapter_response to dbt test and freshness result ([#2964](https://github.com/dbt-labs/dbt-core/issues/2964))
- Improve error message for packages missing `dbt_project.yml` ([#6663](https://github.com/dbt-labs/dbt-core/issues/6663))
- Adjust makefile to have clearer instructions for CI env var changes. ([#6689](https://github.com/dbt-labs/dbt-core/issues/6689))
- Stand-alone Python module for PostgresColumn ([#6772](https://github.com/dbt-labs/dbt-core/issues/6772))
- Exposure owner requires one of name or email keys, and accepts additional arbitrary keys ([#6833](https://github.com/dbt-labs/dbt-core/issues/6833))
- Parse 'group' resource ([#6921](https://github.com/dbt-labs/dbt-core/issues/6921))

### Fixes

- add merge_exclude_columns adapter tests ([#6699](https://github.com/dbt-labs/dbt-core/issues/6699))
- Include adapter_response in NodeFinished run_result log event ([#6703](https://github.com/dbt-labs/dbt-core/issues/6703))
- Sort cli vars before hashing for partial parsing ([#6710](https://github.com/dbt-labs/dbt-core/issues/6710))
- [Regression] exposure_content referenced incorrectly ([#6738](https://github.com/dbt-labs/dbt-core/issues/6738))
- Remove pin on packaging and stop using it for prerelease comparisons ([#6834](https://github.com/dbt-labs/dbt-core/issues/6834))
- Readd depends_on.macros to SeedNode, to support seeds with hooks calling macros ([#6806](https://github.com/dbt-labs/dbt-core/issues/6806))
- Fix regression of --quiet cli parameter behavior ([#6749](https://github.com/dbt-labs/dbt-core/issues/6749))
- Ensure results from hooks contain nodes when processing them ([#6796](https://github.com/dbt-labs/dbt-core/issues/6796))
- Always flush stdout after logging ([#6901](https://github.com/dbt-labs/dbt-core/issues/6901))
- Reapply logging fixes which were accidentally reverted ([#6936](https://github.com/dbt-labs/dbt-core/issues/6936))
- Set relation_name in test nodes at compile time ([#6930](https://github.com/dbt-labs/dbt-core/issues/6930))
- Readd initialization events, --log-cache-events in new CLI ([#6933](https://github.com/dbt-labs/dbt-core/issues/6933))
- Fix previous state tests and disabled exposures, metrics ([#6752](https://github.com/dbt-labs/dbt-core/issues/6752), [#6753](https://github.com/dbt-labs/dbt-core/issues/6753))
- Make use of hashlib.md5() FIPS compliant ([#6900](https://github.com/dbt-labs/dbt-core/issues/6900))

### Docs

- update link to installation instructions ([dbt-docs/#None](https://github.com/dbt-labs/dbt-docs/issues/None))
- Fix JSON path to overview docs ([dbt-docs/#366](https://github.com/dbt-labs/dbt-docs/issues/366))
- Searchable column descriptions ([dbt-docs/#140](https://github.com/dbt-labs/dbt-docs/issues/140), [dbt-docs/#322](https://github.com/dbt-labs/dbt-docs/issues/322), [dbt-docs/#369](https://github.com/dbt-labs/dbt-docs/issues/369))

### Under the Hood

- [CT-921] dbt compile works in click ([#5545](https://github.com/dbt-labs/dbt-core/issues/5545))
- Fix use of ConnectionReused logging event ([#6168](https://github.com/dbt-labs/dbt-core/issues/6168))
- Port docs tests to pytest ([#6573](https://github.com/dbt-labs/dbt-core/issues/6573))
- Update deprecated github action command ([#6153](https://github.com/dbt-labs/dbt-core/issues/6153))
- dbt snapshot works in click ([#5554](https://github.com/dbt-labs/dbt-core/issues/5554))
- dbt list working with click ([#5549](https://github.com/dbt-labs/dbt-core/issues/5549))
- Add dbt run-operation to click CLI ([#5552](https://github.com/dbt-labs/dbt-core/issues/5552))
- dbt build working with new click framework ([#5541](https://github.com/dbt-labs/dbt-core/issues/5541))
- dbt docs generate works with new click framework ([#5543](https://github.com/dbt-labs/dbt-core/issues/5543))
- Replaced the EmptyLine event with a more general Formatting event, and added a Note event. ([#6481](https://github.com/dbt-labs/dbt-core/issues/6481))
- Small optimization on manifest parsing benefitting large DAGs ([#6697](https://github.com/dbt-labs/dbt-core/issues/6697))
- Revised and simplified various structured logging events ([#6664](https://github.com/dbt-labs/dbt-core/issues/6664), [#6665](https://github.com/dbt-labs/dbt-core/issues/6665), [#6666](https://github.com/dbt-labs/dbt-core/issues/6666))
- dbt init works with click ([#5548](https://github.com/dbt-labs/dbt-core/issues/5548))
- [CT-920][CT-1900] Create Click CLI runner and use it to fix dbt docs commands ([#5544](https://github.com/dbt-labs/dbt-core/issues/5544), [#6722](https://github.com/dbt-labs/dbt-core/issues/6722))
- Migrate debug task to click ([#5546](https://github.com/dbt-labs/dbt-core/issues/5546))
- Optimized GraphQueue to remove graph analysis bottleneck in large dags. ([#6759](https://github.com/dbt-labs/dbt-core/issues/6759))
- Implement --version for click cli ([#6757](https://github.com/dbt-labs/dbt-core/issues/6757))
- [CT-1841] Convert custom target test to Pytest ([#6638](https://github.com/dbt-labs/dbt-core/issues/6638))
- Remove BigQuery-specific byte abbreviations ([#6741](https://github.com/dbt-labs/dbt-core/issues/6741))
- warn_error/warn_error_options mutual exclusivity in click ([#6579](https://github.com/dbt-labs/dbt-core/issues/6579))
- Enables the new Click Cli on the commandline! 🚀 ([#6784](https://github.com/dbt-labs/dbt-core/issues/6784))
- Lazily call --version ([#6812](https://github.com/dbt-labs/dbt-core/issues/6812))
- Moving simple_seed to adapter zone to help adapter test conversions ([#CT-1959](https://github.com/dbt-labs/dbt-core/issues/CT-1959))
- flags.THREADS defaults to None ([#6887](https://github.com/dbt-labs/dbt-core/issues/6887))
- Fixing target type exposure error ([#6928](https://github.com/dbt-labs/dbt-core/issues/6928))
- Test binary serialization of logging events ([#6852](https://github.com/dbt-labs/dbt-core/issues/6852))

### Dependencies

- Bump ubuntu from 22.04 to 23.04 ([#6865](https://github.com/dbt-labs/dbt-core/pull/6865))
- Revert hoisting dbt.cli.main into the dbt.name namespace ([#](https://github.com/dbt-labs/dbt-core/pull/))

### Contributors
- [@aezomz](https://github.com/aezomz) ([#2964](https://github.com/dbt-labs/dbt-core/issues/2964))
- [@boxysean](https://github.com/boxysean) ([#6697](https://github.com/dbt-labs/dbt-core/issues/6697))
- [@callum-mcdata](https://github.com/callum-mcdata) ([#6928](https://github.com/dbt-labs/dbt-core/issues/6928))
- [@dave-connors-3](https://github.com/dave-connors-3) ([#6699](https://github.com/dbt-labs/dbt-core/issues/6699))
- [@davidbloss](https://github.com/davidbloss) ([#6153](https://github.com/dbt-labs/dbt-core/issues/6153))
- [@halvorlu](https://github.com/halvorlu) ([#366](https://github.com/dbt-labs/dbt-core/issues/366))
- [@nielspardon](https://github.com/nielspardon) ([#6900](https://github.com/dbt-labs/dbt-core/issues/6900))
- [@ryancharris](https://github.com/ryancharris) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
- [@sungchun12](https://github.com/sungchun12) ([#6079](https://github.com/dbt-labs/dbt-core/issues/6079))


## Previous Releases

For information on prior major and minor releases, see their changelogs:

@@ -3,7 +3,7 @@
# See `/docker` for a generic and production-ready docker file
##

FROM ubuntu:22.04
FROM ubuntu:23.04

ENV DEBIAN_FRONTEND noninteractive


Makefile

@@ -6,18 +6,26 @@ ifeq ($(USE_DOCKER),true)
  DOCKER_CMD := docker-compose run --rm test
endif

LOGS_DIR := ./logs
#
# To override CI_FLAGS, create a file at this repo's root dir named `makefile.test.env`. Fill it
# with any ENV_VAR overrides required by your test environment, e.g.
# DBT_TEST_USER_1=user
# LOG_DIR="dir with a space in it"
#
# Warn: Restrict each line to one variable only.
#
ifeq (./makefile.test.env,$(wildcard ./makefile.test.env))
  include ./makefile.test.env
endif

# Optional flag to invoke tests using our CI env.
# But we always want these active for structured
# log testing.
CI_FLAGS =\
  DBT_TEST_USER_1=dbt_test_user_1\
  DBT_TEST_USER_2=dbt_test_user_2\
  DBT_TEST_USER_3=dbt_test_user_3\
  RUSTFLAGS="-D warnings"\
  LOG_DIR=./logs\
  DBT_LOG_FORMAT=json
  DBT_TEST_USER_1=$(if $(DBT_TEST_USER_1),$(DBT_TEST_USER_1),dbt_test_user_1)\
  DBT_TEST_USER_2=$(if $(DBT_TEST_USER_2),$(DBT_TEST_USER_2),dbt_test_user_2)\
  DBT_TEST_USER_3=$(if $(DBT_TEST_USER_3),$(DBT_TEST_USER_3),dbt_test_user_3)\
  RUSTFLAGS=$(if $(RUSTFLAGS),$(RUSTFLAGS),"-D warnings")\
  LOG_DIR=$(if $(LOG_DIR),$(LOG_DIR),./logs)\
  DBT_LOG_FORMAT=$(if $(DBT_LOG_FORMAT),$(DBT_LOG_FORMAT),json)
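
The $(if $(VAR),$(VAR),default) expressions keep any value already set (via the environment or makefile.test.env) and fall back to the CI default otherwise; a rough Python equivalent of the same pattern:

    import os

    # Hedged sketch of the "use the override if present, else the default"
    # behavior the Makefile's $(if ...) expressions implement.
    def ci_value(name: str, default: str) -> str:
        return os.environ.get(name) or default

    print(ci_value("DBT_TEST_USER_1", "dbt_test_user_1"))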


.PHONY: dev_req
dev_req: ## Installs dbt-* packages in develop mode along with only development dependencies.
@@ -66,7 +74,7 @@ test: .env ## Runs unit tests with py and code checks against staged changes.
.PHONY: integration
integration: .env ## Runs postgres integration tests with py-integration
	@\
	$(if $(USE_CI_FLAGS), $(CI_FLAGS)) $(DOCKER_CMD) tox -e py-integration -- -nauto
	$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto

.PHONY: integration-fail-fast
integration-fail-fast: .env ## Runs postgres integration tests with py-integration in "fail fast" mode.
@@ -76,9 +84,9 @@ integration-fail-fast: .env ## Runs postgres integration tests with py-integrati
.PHONY: interop
interop: clean
	@\
	mkdir $(LOGS_DIR) && \
	mkdir $(LOG_DIR) && \
	$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto && \
	LOG_DIR=$(LOGS_DIR) cargo run --manifest-path test/interop/log_parsing/Cargo.toml
	LOG_DIR=$(LOG_DIR) cargo run --manifest-path test/interop/log_parsing/Cargo.toml

.PHONY: setup-db
setup-db: ## Setup Postgres database with docker-compose for system testing.

@@ -21,7 +21,7 @@ These select statements, or "models", form a dbt project. Models frequently buil

## Getting started

- [Install dbt](https://docs.getdbt.com/docs/installation)
- [Install dbt](https://docs.getdbt.com/docs/get-started/installation)
- Read the [introduction](https://docs.getdbt.com/docs/introduction/) and [viewpoint](https://docs.getdbt.com/docs/about/viewpoint/)

## Join the dbt Community

@@ -17,7 +17,6 @@ from typing import (
    Iterator,
    Set,
)

import agate
import pytz

@@ -54,7 +53,7 @@ from dbt.events.types import (
    CodeExecutionStatus,
    CatalogGenerationError,
)
from dbt.utils import filter_null_values, executor, cast_to_str
from dbt.utils import filter_null_values, executor, cast_to_str, AttrDict

from dbt.adapters.base.connections import Connection, AdapterResponse
from dbt.adapters.base.meta import AdapterMeta, available
@@ -943,7 +942,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        context_override: Optional[Dict[str, Any]] = None,
        kwargs: Dict[str, Any] = None,
        text_only_columns: Optional[Iterable[str]] = None,
    ) -> agate.Table:
    ) -> AttrDict:
        """Look macro_name up in the manifest and execute its results.

        :param macro_name: The name of the macro to execute.
@@ -1028,7 +1027,7 @@ class BaseAdapter(metaclass=AdapterMeta):
            manifest=manifest,
        )

        results = self._catalog_filter_table(table, manifest)
        results = self._catalog_filter_table(table, manifest)  # type: ignore[arg-type]
        return results

    def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]:
@@ -1060,7 +1059,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        loaded_at_field: str,
        filter: Optional[str],
        manifest: Optional[Manifest] = None,
    ) -> Dict[str, Any]:
    ) -> Tuple[AdapterResponse, Dict[str, Any]]:
        """Calculate the freshness of sources in dbt, and return it"""
        kwargs: Dict[str, Any] = {
            "source": source,
@@ -1069,7 +1068,8 @@ class BaseAdapter(metaclass=AdapterMeta):
        }

        # run the macro
        table = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest)
        result = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest)
        adapter_response, table = result.response, result.table  # type: ignore[attr-defined]
        # now we have a 1-row table of the maximum `loaded_at_field` value and
        # the current time according to the db.
        if len(table) != 1 or len(table[0]) != 2:
@@ -1083,11 +1083,12 @@ class BaseAdapter(metaclass=AdapterMeta):

        snapshotted_at = _utc(table[0][1], source, loaded_at_field)
        age = (snapshotted_at - max_loaded_at).total_seconds()
        return {
        freshness = {
            "max_loaded_at": max_loaded_at,
            "snapshotted_at": snapshotted_at,
            "age": age,
        }
        return adapter_response, freshness
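
With the return-type change above, callers now unpack a (response, freshness) pair instead of a bare dict; a hedged sketch (`adapter`, `source_relation`, and `manifest` are placeholders a real caller would already hold):

    from typing import Any, Dict, Tuple

    # Hedged sketch: calc_freshness now returns (AdapterResponse, dict).
    def check_freshness(adapter, source_relation, manifest) -> Tuple[Any, Dict[str, Any]]:
        adapter_response, freshness = adapter.calc_freshness(
            source=source_relation,
            loaded_at_field="loaded_at",
            filter=None,
            manifest=manifest,
        )
        return adapter_response, freshness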

    def pre_model_hook(self, config: Mapping[str, Any]) -> Any:
        """A hook for running some operation before the model materialization

@@ -7,9 +7,9 @@ from dbt.adapters.protocol import AdapterProtocol

def project_name_from_path(include_path: str) -> str:
    # avoid an import cycle
    from dbt.config.project import Project
    from dbt.config.project import PartialProject

    partial = Project.partial_load(include_path)
    partial = PartialProject.from_project_root(include_path)
    if partial.project_name is None:
        raise CompilationError(f"Invalid project at {include_path}: name not set!")
    return partial.project_name

@@ -17,7 +17,7 @@ from dbt.exceptions import (
)
from dbt.events.functions import fire_event, fire_event_if
from dbt.events.types import CacheAction, CacheDumpGraph
import dbt.flags as flags
from dbt.flags import get_flags
from dbt.utils import lowercase


@@ -319,6 +319,7 @@ class RelationsCache:

        :param BaseRelation relation: The underlying relation.
        """
        flags = get_flags()
        cached = _CachedRelation(relation)
        fire_event_if(
            flags.LOG_CACHE_EVENTS,
@@ -456,7 +457,7 @@ class RelationsCache:
                    ref_key_2=_make_msg_from_ref_key(new),
                )
            )

        flags = get_flags()
        fire_event_if(
            flags.LOG_CACHE_EVENTS,
            lambda: CacheDumpGraph(before_after="before", action="rename", dump=self.dump_graph()),

@@ -0,0 +1 @@
from .main import cli as dbt_cli  # noqa

core/dbt/cli/context.py

@@ -0,0 +1,16 @@
import click
from typing import Optional

from dbt.cli.main import cli as dbt


def make_context(args, command=dbt) -> Optional[click.Context]:
    try:
        ctx = command.make_context(command.name, args)
    except click.exceptions.Exit:
        return None

    ctx.invoked_subcommand = ctx.protected_args[0] if ctx.protected_args else None
    ctx.obj = {}

    return ctx

core/dbt/cli/example.py

@@ -0,0 +1,20 @@
from dbt.cli.main import dbtRunner
from dbt.config.runtime import load_profile, load_project

if __name__ == "__main__":
    project_dir = "/Users/chenyuli/git/jaffle_shop"
    cli_args = ["run", "--project-dir", project_dir]

    # initialize the dbt runner
    dbt = dbtRunner()
    # run the command
    res, success = dbt.invoke(cli_args)

    # preload profile and project
    profile = load_profile(project_dir, {}, "testing-postgres")
    project = load_project(project_dir, False, profile, {})

    # initialize the runner with pre-loaded profile and project, you can also pass in a preloaded manifest
    dbt = dbtRunner(profile=profile, project=project)
    # run the command, this will use the pre-loaded profile and project instead of loading
    res, success = dbt.invoke(cli_args)
@@ -1,44 +1,204 @@
# TODO Move this to /core/dbt/flags.py when we're ready to break things
import os
import sys
from dataclasses import dataclass
from importlib import import_module
from multiprocessing import get_context
from pprint import pformat as pf
from typing import Set, List

from click import Context, get_current_context, BadOptionUsage
from click.core import ParameterSource, Command, Group

from dbt.config.profile import read_user_config
from dbt.contracts.project import UserConfig
from dbt.helper_types import WarnErrorOptions
from dbt.cli.resolvers import default_project_dir, default_log_path

from click import get_current_context

if os.name != "nt":
    # https://bugs.python.org/issue41567
    import multiprocessing.popen_spawn_posix  # type: ignore # noqa: F401

# TODO anything that has a default in params should be removed here?
# Or maybe only the ones that's in the root click group
FLAGS_DEFAULTS = {
    "INDIRECT_SELECTION": "eager",
    "TARGET_PATH": None,
    # cli args without user_config or env var option
    "FULL_REFRESH": False,
    "STRICT_MODE": False,
    "STORE_FAILURES": False,
}


# For backwards compatibility, some params are defined across multiple levels,
# Top-level value should take precedence.
# e.g. dbt --target-path test2 run --target-path test2
EXPECTED_DUPLICATE_PARAMS = [
    "full_refresh",
    "target_path",
    "version_check",
    "fail_fast",
    "indirect_selection",
    "store_failures",
]


def convert_config(config_name, config_value):
    # This function should take care of converting the values from config and original
    # set_from_args to the correct type
    ret = config_value
    if config_name.lower() == "warn_error_options" and type(config_value) == dict:
        ret = WarnErrorOptions(
            include=config_value.get("include", []), exclude=config_value.get("exclude", [])
        )
    return ret
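
A quick hedged illustration of convert_config, assuming this module is importable as dbt.cli.flags (the include value is made up):

    # Illustration only: a warn_error_options dict from user config is coerced
    # into a WarnErrorOptions object; other config values pass through unchanged.
    from dbt.cli.flags import convert_config

    opts = convert_config("warn_error_options", {"include": ["Deprecations"]})
    print(opts)                                 # WarnErrorOptions(...)
    print(convert_config("printer_width", 80))  # 80, untouched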


def args_to_context(args: List[str]) -> Context:
    """Convert a list of args to a click context with proper hierarchy for dbt commands"""
    from dbt.cli.main import cli

    cli_ctx = cli.make_context(cli.name, args)
    # args would get converted during make context
    if len(args) == 1 and "," in args[0]:
        args = args[0].split(",")
    sub_command_name, sub_command, args = cli.resolve_command(cli_ctx, args)

    # handle source and docs group
    if type(sub_command) == Group:
        sub_command_name, sub_command, args = sub_command.resolve_command(cli_ctx, args)

    assert type(sub_command) == Command
    sub_command_ctx = sub_command.make_context(sub_command_name, args)
    sub_command_ctx.parent = cli_ctx
    return sub_command_ctx
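
A hedged usage sketch for args_to_context, assuming this module is importable as dbt.cli.flags:

    # Build a click Context for a subcommand, then construct Flags from it.
    from dbt.cli.flags import Flags, args_to_context

    ctx = args_to_context(["run", "--project-dir", "."])
    flags = Flags(ctx)
    print(flags.PROJECT_DIR)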


@dataclass(frozen=True)
class Flags:
    def __init__(self, ctx=None) -> None:
    def __init__(self, ctx: Context = None, user_config: UserConfig = None) -> None:

        # set the default flags
        for key, value in FLAGS_DEFAULTS.items():
            object.__setattr__(self, key, value)

        if ctx is None:
            ctx = get_current_context()

        def assign_params(ctx):
        def assign_params(ctx, params_assigned_from_default):
            """Recursively adds all click params to flag object"""
            for param_name, param_value in ctx.params.items():
                # TODO: this is to avoid duplicate params being defined in two places (version_check in run and cli)
                # However this is a bit of a hack and we should find a better way to do this

                # N.B. You have to use the base MRO method (object.__setattr__) to set attributes
                # when using frozen dataclasses.
                # https://docs.python.org/3/library/dataclasses.html#frozen-instances
                if hasattr(self, param_name):
                    raise Exception(f"Duplicate flag names found in click command: {param_name}")
                object.__setattr__(self, param_name.upper(), param_value)
        if ctx.parent:
            assign_params(ctx.parent)
                if hasattr(self, param_name.upper()):
                    if param_name not in EXPECTED_DUPLICATE_PARAMS:
                        raise Exception(
                            f"Duplicate flag names found in click command: {param_name}"
                        )
                    else:
                        # Expected duplicate param from multi-level click command (ex: dbt --full_refresh run --full_refresh)
                        # Overwrite user-configured param with value from parent context
                        if ctx.get_parameter_source(param_name) != ParameterSource.DEFAULT:
                            object.__setattr__(self, param_name.upper(), param_value)
                else:
                    object.__setattr__(self, param_name.upper(), param_value)
                if ctx.get_parameter_source(param_name) == ParameterSource.DEFAULT:
                    params_assigned_from_default.add(param_name)

        assign_params(ctx)
            if ctx.parent:
                assign_params(ctx.parent, params_assigned_from_default)

        params_assigned_from_default = set()  # type: Set[str]
        assign_params(ctx, params_assigned_from_default)
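
A minimal sketch of the frozen-dataclass trick noted above: plain assignment raises FrozenInstanceError, so attributes are written through the base object.__setattr__:

    from dataclasses import dataclass

    # A frozen dataclass rejects normal attribute assignment, so values are
    # set with object.__setattr__, exactly as the Flags class does.
    @dataclass(frozen=True)
    class Box:
        pass

    box = Box()
    object.__setattr__(box, "VALUE", 1)  # works
    # box.VALUE = 2                      # would raise FrozenInstanceError
    print(box.VALUE)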

        # Get the invoked command flags
        invoked_subcommand_name = (
            ctx.invoked_subcommand if hasattr(ctx, "invoked_subcommand") else None
        )
        if invoked_subcommand_name is not None:
            invoked_subcommand = getattr(import_module("dbt.cli.main"), invoked_subcommand_name)
            invoked_subcommand.allow_extra_args = True
            invoked_subcommand.ignore_unknown_options = True
            invoked_subcommand_ctx = invoked_subcommand.make_context(None, sys.argv)
            assign_params(invoked_subcommand_ctx, params_assigned_from_default)

        if not user_config:
            profiles_dir = getattr(self, "PROFILES_DIR", None)
            user_config = read_user_config(profiles_dir) if profiles_dir else None

        # Overwrite default assignments with user config if available
        if user_config:
            param_assigned_from_default_copy = params_assigned_from_default.copy()
            for param_assigned_from_default in params_assigned_from_default:
                user_config_param_value = getattr(user_config, param_assigned_from_default, None)
                if user_config_param_value is not None:
                    object.__setattr__(
                        self,
                        param_assigned_from_default.upper(),
                        convert_config(param_assigned_from_default, user_config_param_value),
                    )
                    param_assigned_from_default_copy.remove(param_assigned_from_default)
            params_assigned_from_default = param_assigned_from_default_copy

        # Hard coded flags
        object.__setattr__(self, "WHICH", ctx.info_name)
        object.__setattr__(self, "WHICH", invoked_subcommand_name or ctx.info_name)
        object.__setattr__(self, "MP_CONTEXT", get_context("spawn"))

        # Support console DO NOT TRACK initiave
        if os.getenv("DO_NOT_TRACK", "").lower() in (1, "t", "true", "y", "yes"):
            object.__setattr__(self, "ANONYMOUS_USAGE_STATS", False)
        # Apply the lead/follow relationship between some parameters
        self._override_if_set("USE_COLORS", "USE_COLORS_FILE", params_assigned_from_default)
        self._override_if_set("LOG_LEVEL", "LOG_LEVEL_FILE", params_assigned_from_default)
        self._override_if_set("LOG_FORMAT", "LOG_FORMAT_FILE", params_assigned_from_default)

        # Default LOG_PATH from PROJECT_DIR, if available.
        if getattr(self, "LOG_PATH", None) is None:
            project_dir = getattr(self, "PROJECT_DIR", default_project_dir())
            version_check = getattr(self, "VERSION_CHECK", True)
            object.__setattr__(self, "LOG_PATH", default_log_path(project_dir, version_check))

        # Support console DO NOT TRACK initiative
        if os.getenv("DO_NOT_TRACK", "").lower() in ("1", "t", "true", "y", "yes"):
            object.__setattr__(self, "SEND_ANONYMOUS_USAGE_STATS", False)

        # Check mutual exclusivity once all flags are set
        self._assert_mutually_exclusive(
            params_assigned_from_default, ["WARN_ERROR", "WARN_ERROR_OPTIONS"]
        )

        # Support lower cased access for legacy code
        params = set(
            x for x in dir(self) if not callable(getattr(self, x)) and not x.startswith("__")
        )
        for param in params:
            object.__setattr__(self, param.lower(), getattr(self, param))

    # If the value of the lead parameter was set explicitly, apply the value to follow,
    # unless follow was also set explicitly.
    def _override_if_set(self, lead: str, follow: str, defaulted: Set[str]) -> None:
        if lead.lower() not in defaulted and follow.lower() in defaulted:
            object.__setattr__(self, follow.upper(), getattr(self, lead.upper(), None))

    def __str__(self) -> str:
        return str(pf(self.__dict__))

    def _assert_mutually_exclusive(
        self, params_assigned_from_default: Set[str], group: List[str]
    ) -> None:
        """
        Ensure no elements from group are simultaneously provided by a user, as inferred from params_assigned_from_default.
        Raises click.UsageError if any two elements from group are simultaneously provided by a user.
        """
        set_flag = None
        for flag in group:
            flag_set_by_user = flag.lower() not in params_assigned_from_default
            if flag_set_by_user and set_flag:
                raise BadOptionUsage(
                    flag.lower(), f"{flag.lower()}: not allowed with argument {set_flag.lower()}"
                )
            elif flag_set_by_user:
                set_flag = flag
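
A hedged illustration of the mutual-exclusion check: constructing Flags from an invocation that sets both warn-error flags should raise click's BadOptionUsage (flag spellings here are assumptions):

    from dbt.cli.flags import Flags, args_to_context

    ctx = args_to_context(
        ["--warn-error", "--warn-error-options", '{"include": "all"}', "run"]
    )
    Flags(ctx)  # raises BadOptionUsage: warn_error_options not allowed with warn_error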

@@ -1,22 +1,73 @@
import inspect  # This is temporary for RAT-ing
from copy import copy
from pprint import pformat as pf  # This is temporary for RAT-ing
from typing import List, Tuple, Optional

import click
from dbt.adapters.factory import adapter_management
from dbt.cli import params as p
from dbt.cli.flags import Flags
from dbt.profiler import profiler
from dbt.cli import requires, params as p
from dbt.config.project import Project
from dbt.config.profile import Profile
from dbt.contracts.graph.manifest import Manifest
from dbt.task.clean import CleanTask
from dbt.task.compile import CompileTask
from dbt.task.deps import DepsTask
from dbt.task.debug import DebugTask
from dbt.task.run import RunTask
from dbt.task.serve import ServeTask
from dbt.task.test import TestTask
from dbt.task.snapshot import SnapshotTask
from dbt.task.seed import SeedTask
from dbt.task.list import ListTask
from dbt.task.freshness import FreshnessTask
from dbt.task.run_operation import RunOperationTask
from dbt.task.build import BuildTask
from dbt.task.generate import GenerateTask
from dbt.task.init import InitTask

import importlib

metricflow_module = importlib.util.find_spec("metricflow")
if metricflow_module is not None:
    from metricflow.cli.main import (
        list_metrics,
        list_dimensions,
        get_dimension_values,
        query,
        validate_configs
    )


def cli_runner():
    # Alias "list" to "ls"
    ls = copy(cli.commands["list"])
    ls.hidden = True
    cli.add_command(ls, "ls")
class dbtUsageException(Exception):
    pass

    # Run the cli
    cli()

class dbtInternalException(Exception):
    pass


# Programmatic invocation
class dbtRunner:
    def __init__(
        self, project: Project = None, profile: Profile = None, manifest: Manifest = None
    ):
        self.project = project
        self.profile = profile
        self.manifest = manifest

    def invoke(self, args: List[str]) -> Tuple[Optional[List], bool]:
        try:
            dbt_ctx = cli.make_context(cli.name, args)
            dbt_ctx.obj = {
                "project": self.project,
                "profile": self.profile,
                "manifest": self.manifest,
            }
            return cli.invoke(dbt_ctx)
        except click.exceptions.Exit as e:
            # 0 exit code, expected for --version early exit
            if str(e) == "0":
                return [], True
            raise dbtInternalException(f"unhandled exit code {str(e)}")
        except (click.NoSuchOption, click.UsageError) as e:
            raise dbtUsageException(e.message)
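
A hedged sketch of catching the new exception types around dbtRunner.invoke (the bad flag is deliberate):

    from dbt.cli.main import dbtRunner, dbtUsageException

    dbt = dbtRunner()
    try:
        res, success = dbt.invoke(["run", "--no-such-flag"])
    except dbtUsageException as exc:
        print(f"bad invocation: {exc}")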


# dbt
@@ -27,21 +78,27 @@ def cli_runner():
    epilog="Specify one of these sub-commands and you can find more help from there.",
)
@click.pass_context
@p.anonymous_usage_stats
@p.send_anonymous_usage_stats
@p.cache_selected_only
@p.debug
@p.enable_legacy_logger
@p.fail_fast
@p.log_cache_events
@p.log_format
@p.log_format_file
@p.log_level
@p.log_level_file
@p.log_path
@p.macro_debugging
@p.partial_parse
@p.print
@p.printer_width
@p.quiet
@p.record_timing_info
@p.single_threaded
@p.static_parser
@p.use_colors
@p.use_colors_file
@p.use_experimental_parser
@p.version
@p.version_check
@@ -52,21 +109,6 @@ def cli(ctx, **kwargs):
    """An ELT tool for managing your SQL transformations and data models.
    For more documentation on these commands, visit: docs.getdbt.com
    """
    incomplete_flags = Flags()

    # Profiling
    if incomplete_flags.RECORD_TIMING_INFO:
        ctx.with_resource(profiler(enable=True, outfile=incomplete_flags.RECORD_TIMING_INFO))

    # Adapter management
    ctx.with_resource(adapter_management())

    # Version info
    if incomplete_flags.VERSION:
        click.echo(f"`version` called\n ctx.params: {pf(ctx.params)}")
        return
    else:
        del ctx.params["version"]


# dbt build
@@ -75,13 +117,14 @@ def cli(ctx, **kwargs):
@p.defer
@p.exclude
@p.fail_fast
@p.favor_state
@p.full_refresh
@p.indirect_selection
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.resource_type
@p.select
@p.selector
@p.show
@p.state
@@ -91,10 +134,22 @@ def cli(ctx, **kwargs):
@p.threads
@p.vars
@p.version_check
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def build(ctx, **kwargs):
    """Run all Seeds, Models, Snapshots, and tests in DAG order"""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    task = BuildTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt clean
@@ -105,10 +160,24 @@ def build(ctx, **kwargs):
@p.project_dir
@p.target
@p.vars
@requires.preflight
@requires.unset_profile
@requires.project
def clean(ctx, **kwargs):
    """Delete all folders in the clean-targets list (usually the dbt_packages and target directories.)"""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    task = CleanTask(ctx.obj["flags"], ctx.obj["project"])

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# mf
@cli.group()
@click.pass_context
def mf(ctx, **kwargs):
    """Used to house the metricflow metrics"""
    pass


# dbt docs
@@ -124,11 +193,11 @@ def docs(ctx, **kwargs):
@p.compile_docs
@p.defer
@p.exclude
@p.log_path
@p.models
@p.favor_state
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.target
@@ -136,10 +205,22 @@ def docs(ctx, **kwargs):
@p.threads
@p.vars
@p.version_check
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest(write=False)
def docs_generate(ctx, **kwargs):
    """Generate the documentation website for your project"""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    task = GenerateTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt docs serve
@@ -152,10 +233,22 @@ def docs_generate(ctx, **kwargs):
@p.project_dir
@p.target
@p.vars
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def docs_serve(ctx, **kwargs):
    """Serve the documentation website for your project"""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    task = ServeTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt compile
@@ -163,13 +256,13 @@ def docs_serve(ctx, **kwargs):
@click.pass_context
@p.defer
@p.exclude
@p.favor_state
@p.full_refresh
@p.log_path
@p.models
@p.parse_only
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.target
@@ -177,10 +270,23 @@ def docs_serve(ctx, **kwargs):
@p.threads
@p.vars
@p.version_check
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def compile(ctx, **kwargs):
    """Generates executable SQL from source, model, test, and analysis files. Compiled SQL files are written to the target/ directory."""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    """Generates executable SQL from source, model, test, and analysis files. Compiled SQL files are written to the
    target/ directory."""
    task = CompileTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt debug
@@ -188,15 +294,22 @@ def compile(ctx, **kwargs):
@click.pass_context
@p.config_dir
@p.profile
@p.profiles_dir
@p.profiles_dir_exists_false
@p.project_dir
@p.target
@p.vars
@p.version_check
@requires.preflight
def debug(ctx, **kwargs):
    """Show some helpful information about dbt for debugging. Not to be confused with the --debug option which increases verbosity."""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    task = DebugTask(
        ctx.obj["flags"],
        None,
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt deps
@@ -207,25 +320,36 @@ def debug(ctx, **kwargs):
@p.project_dir
@p.target
@p.vars
@requires.preflight
@requires.unset_profile
@requires.project
def deps(ctx, **kwargs):
    """Pull the most recent version of the dependencies listed in packages.yml"""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    task = DepsTask(ctx.obj["flags"], ctx.obj["project"])
    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt init
@cli.command("init")
@click.pass_context
# for backwards compatibility, accept 'project_name' as an optional positional argument
@click.argument("project_name", required=False)
@p.profile
@p.profiles_dir
@p.project_dir
@p.skip_profile_setup
@p.target
@p.vars
@requires.preflight
def init(ctx, **kwargs):
    """Initialize a new DBT project."""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    """Initialize a new dbt project."""
    task = InitTask(ctx.obj["flags"], None)

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt list
@@ -240,21 +364,39 @@ def init(ctx, **kwargs):
@p.profiles_dir
@p.project_dir
@p.resource_type
@p.raw_select
@p.selector
@p.state
@p.target
@p.vars
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def list(ctx, **kwargs):
    """List the resources in your project"""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    task = ListTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# Alias "list" to "ls"
ls = copy(cli.commands["list"])
ls.hidden = True
cli.add_command(ls, "ls")


# dbt parse
@cli.command("parse")
@click.pass_context
@p.compile_parse
@p.log_path
@p.profile
@p.profiles_dir
@p.project_dir
@@ -264,24 +406,29 @@ def list(ctx, **kwargs):
@p.vars
@p.version_check
@p.write_manifest
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest(write_perf_info=True)
def parse(ctx, **kwargs):
    """Parses the project and provides information on performance"""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    # manifest generation and writing happens in @requires.manifest
    return None, True


# dbt run
@cli.command("run")
@click.pass_context
@p.defer
@p.favor_state
@p.exclude
@p.fail_fast
@p.full_refresh
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.target
@@ -289,25 +436,50 @@ def parse(ctx, **kwargs):
@p.threads
@p.vars
@p.version_check
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def run(ctx, **kwargs):
    """Compile SQL and execute against the current target database."""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    task = RunTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt run operation
@cli.command("run-operation")
@click.pass_context
@click.argument("macro")
@p.args
@p.profile
@p.profiles_dir
@p.project_dir
@p.target
@p.vars
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def run_operation(ctx, **kwargs):
    """Run the named macro with any supplied arguments."""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    task = RunOperationTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt seed
@@ -315,11 +487,10 @@ def run_operation(ctx, **kwargs):
@click.pass_context
@p.exclude
@p.full_refresh
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.show
@p.state
@@ -328,10 +499,21 @@ def run_operation(ctx, **kwargs):
@p.threads
@p.vars
@p.version_check
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def seed(ctx, **kwargs):
    """Load data from csv files into your data warehouse."""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    task = SeedTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )
    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt snapshot
@@ -339,19 +521,32 @@ def seed(ctx, **kwargs):
@click.pass_context
@p.defer
@p.exclude
@p.models
@p.favor_state
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.target
@p.threads
@p.vars
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def snapshot(ctx, **kwargs):
    """Execute snapshots defined in your project"""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    task = SnapshotTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt source
@@ -365,20 +560,44 @@ def source(ctx, **kwargs):
@source.command("freshness")
@click.pass_context
@p.exclude
@p.models
@p.output_path  # TODO: Is this ok to re-use? We have three different output params, how much can we consolidate?
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.target
@p.threads
@p.vars
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def freshness(ctx, **kwargs):
    """Snapshots the current freshness of the project's sources"""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    """check the current freshness of the project's sources"""
    task = FreshnessTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# Alias "source freshness" to "snapshot-freshness"
snapshot_freshness = copy(cli.commands["source"].commands["freshness"])  # type: ignore
snapshot_freshness.hidden = True
cli.commands["source"].add_command(snapshot_freshness, "snapshot-freshness")  # type: ignore
if metricflow_module is not None:
    cli.add_command(list_metrics, "list-metrics")
    cli.add_command(list_dimensions, "list-dimensions")
    cli.add_command(get_dimension_values, "get-dimension-values")
    cli.add_command(query, "query")
    cli.add_command(validate_configs, "validate-configs")


# dbt test
@@ -387,12 +606,12 @@ def freshness(ctx, **kwargs):
@p.defer
@p.exclude
@p.fail_fast
@p.favor_state
@p.indirect_selection
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.store_failures
@@ -401,12 +620,50 @@ def freshness(ctx, **kwargs):
@p.threads
@p.vars
@p.version_check
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def test(ctx, **kwargs):
    """Runs tests on data in deployed models. Run this after `dbt run`"""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    task = TestTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt validate
# @cli.command("validate")
# @click.pass_context
# @p.args
# @p.profile
# @p.profiles_dir
# @p.project_dir
# @p.target
# @p.vars
# @requires.preflight
# @requires.profile
# @requires.project
# @requires.runtime_config
# @requires.manifest
# def validate(ctx, **kwargs):
#     """Validates the semantic layer"""
#     task = ValidateTask(
#         ctx.obj["flags"],
#         ctx.obj["runtime_config"],
#         ctx.obj["manifest"],
#     )
#     results = task.run()
#     success = task.interpret_results(results)
#     return results, success


# Support running as a module
if __name__ == "__main__":
    cli_runner()
    cli()

@@ -1,5 +1,7 @@
from click import ParamType
import yaml
from click import ParamType, Choice

from dbt.config.utils import parse_cli_vars
from dbt.exceptions import ValidationError

from dbt.helper_types import WarnErrorOptions

@@ -14,8 +16,8 @@ class YAML(ParamType):
        if not isinstance(value, str):
            self.fail(f"Cannot load YAML from type {type(value)}", param, ctx)
        try:
            return yaml.load(value, Loader=yaml.Loader)
        except yaml.parser.ParserError:
            return parse_cli_vars(value)
        except ValidationError:
            self.fail(f"String '{value}' is not valid YAML", param, ctx)


@@ -25,6 +27,7 @@ class WarnErrorOptionsType(YAML):
    name = "WarnErrorOptionsType"

    def convert(self, value, param, ctx):
        # this function is being used by param in click
        include_exclude = super().convert(value, param, ctx)

        return WarnErrorOptions(
@@ -46,3 +49,13 @@ class Truthy(ParamType):
            return None
        else:
            return value


class ChoiceTuple(Choice):
    name = "CHOICE_TUPLE"

    def convert(self, value, param, ctx):
        for value_item in value:
            super().convert(value_item, param, ctx)

        return value

core/dbt/cli/options.py

@@ -0,0 +1,44 @@
import click


# Implementation from: https://stackoverflow.com/a/48394004
# Note MultiOption options must be specified with type=tuple or type=ChoiceTuple (https://github.com/pallets/click/issues/2012)
class MultiOption(click.Option):
    def __init__(self, *args, **kwargs):
        self.save_other_options = kwargs.pop("save_other_options", True)
        nargs = kwargs.pop("nargs", -1)
        assert nargs == -1, "nargs, if set, must be -1 not {}".format(nargs)
        super(MultiOption, self).__init__(*args, **kwargs)
        self._previous_parser_process = None
        self._eat_all_parser = None

    def add_to_parser(self, parser, ctx):
        def parser_process(value, state):
            # method to hook to the parser.process
            done = False
            value = [value]
            if self.save_other_options:
                # grab everything up to the next option
                while state.rargs and not done:
                    for prefix in self._eat_all_parser.prefixes:
                        if state.rargs[0].startswith(prefix):
                            done = True
                    if not done:
                        value.append(state.rargs.pop(0))
            else:
                # grab everything remaining
                value += state.rargs
                state.rargs[:] = []
            value = tuple(value)
            # call the actual process
            self._previous_parser_process(value, state)

        retval = super(MultiOption, self).add_to_parser(parser, ctx)
        for name in self.opts:
            our_parser = parser._long_opt.get(name) or parser._short_opt.get(name)
            if our_parser:
                self._eat_all_parser = our_parser
                self._previous_parser_process = our_parser.process
                our_parser.process = parser_process
                break
        return retval
|
||||
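A minimal sketch of declaring and invoking an option backed by this class (the command and option name here are illustrative; note the type=tuple requirement called out in the comment above, and mirrored by the real --exclude declaration later in this diff):

import click
from dbt.cli.options import MultiOption

@click.command()
@click.option("--select", cls=MultiOption, type=tuple, help="One or more node names.")
def demo(select):
    # `demo --select a b c` consumes all three tokens, so select == ("a", "b", "c")
    click.echo(select)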
@@ -1,17 +1,15 @@
from pathlib import Path, PurePath

import click
from dbt.cli.option_types import YAML, WarnErrorOptionsType
from dbt.cli.options import MultiOption
from dbt.cli.option_types import YAML, ChoiceTuple, WarnErrorOptionsType
from dbt.cli.resolvers import default_project_dir, default_profiles_dir
from dbt.version import get_version_information


# TODO: The name (reflected in flags) is a correction!
# The original name was `SEND_ANONYMOUS_USAGE_STATS` and used an env var called "DBT_SEND_ANONYMOUS_USAGE_STATS"
# Both of which break existing naming conventions (doesn't match param flag).
# This will need to be fixed before use in the main codebase and communicated as a change to the community!
anonymous_usage_stats = click.option(
    "--anonymous-usage-stats/--no-anonymous-usage-stats",
    envvar="DBT_ANONYMOUS_USAGE_STATS",
# TODO: Rename this to meet naming conventions (the word "send" is redundant)
send_anonymous_usage_stats = click.option(
    "--send-anonymous-usage-stats/--no-send-anonymous-usage-stats",
    envvar="DBT_SEND_ANONYMOUS_USAGE_STATS",
    help="Send anonymous usage stats to dbt Labs.",
    default=True,
)
@@ -39,7 +37,7 @@ cache_selected_only = click.option(
compile_docs = click.option(
    "--compile/--no-compile",
    envvar=None,
    help="Wether or not to run 'dbt compile' as part of docs generation",
    help="Whether or not to run 'dbt compile' as part of docs generation",
    default=True,
)

@@ -80,7 +78,9 @@ enable_legacy_logger = click.option(
    hidden=True,
)

exclude = click.option("--exclude", envvar=None, help="Specify the nodes to exclude.")
exclude = click.option(
    "--exclude", envvar=None, type=tuple, cls=MultiOption, help="Specify the nodes to exclude."
)

fail_fast = click.option(
    "--fail-fast/--no-fail-fast",
@@ -89,6 +89,12 @@ fail_fast = click.option(
    help="Stop execution on first failure.",
)

favor_state = click.option(
    "--favor-state/--no-favor-state",
    envvar="DBT_FAVOR_STATE",
    help="If set, defer to the argument provided to the state flag for resolving unselected nodes, even if the node(s) exist as a database object in the current environment.",
)

full_refresh = click.option(
    "--full-refresh",
    "-f",
@@ -101,7 +107,7 @@ indirect_selection = click.option(
    "--indirect-selection",
    envvar="DBT_INDIRECT_SELECTION",
    help="Select all tests that are adjacent to selected resources, even if they those resources have been explicitly selected.",
    type=click.Choice(["eager", "cautious"], case_sensitive=False),
    type=click.Choice(["eager", "cautious", "buildable"], case_sensitive=False),
    default="eager",
)

@@ -115,15 +121,40 @@ log_format = click.option(
    "--log-format",
    envvar="DBT_LOG_FORMAT",
    help="Specify the log format, overriding the command's default.",
    type=click.Choice(["text", "json", "default"], case_sensitive=False),
    type=click.Choice(["text", "debug", "json", "default"], case_sensitive=False),
    default="default",
)

log_format_file = click.option(
    "--log-format-file",
    envvar="DBT_LOG_FORMAT_FILE",
    help="Specify the file log format, overriding the command's default and the value of --log-format.",
    type=click.Choice(["text", "debug", "json", "default"], case_sensitive=False),
    default="debug",
)

log_level = click.option(
    "--log-level",
    envvar="DBT_LOG_LEVEL",
    help="Specify the minimum severity of events that are logged.",
    type=click.Choice(["debug", "info", "warn", "error", "none"], case_sensitive=False),
    default="info",
)

log_level_file = click.option(
    "--log-level-file",
    envvar="DBT_LOG_LEVEL_FILE",
    help="Specify the minimum severity of events that are logged to file, overriding the value of --log-level-file.",
    type=click.Choice(["debug", "info", "warn", "error", "none"], case_sensitive=False),
    default="debug",
)

log_path = click.option(
    "--log-path",
    envvar="DBT_LOG_PATH",
    help="Configure the 'log-path'. Only applies this setting for the current run. Overrides the 'DBT_LOG_PATH' if it is set.",
    type=click.Path(),
    default=None,
    type=click.Path(resolve_path=True, path_type=Path),
)

macro_debugging = click.option(
@@ -132,21 +163,12 @@ macro_debugging = click.option(
    hidden=True,
)

models = click.option(
    "-m",
    "-s",
    "models",
    envvar=None,
    help="Specify the nodes to include.",
    multiple=True,
)

output = click.option(
    "--output",
    envvar=None,
    help="TODO: No current help text",
    type=click.Choice(["json", "name", "path", "selector"], case_sensitive=False),
    default="name",
    default="selector",
)

output_keys = click.option(
@@ -213,15 +235,24 @@ profiles_dir = click.option(
    "--profiles-dir",
    envvar="DBT_PROFILES_DIR",
    help="Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/",
    default=default_profiles_dir(),
    default=default_profiles_dir,
    type=click.Path(exists=True),
)

# `dbt debug` uses this because it implements custom behaviour for non-existent profiles.yml directories
profiles_dir_exists_false = click.option(
    "--profiles-dir",
    envvar="DBT_PROFILES_DIR",
    help="Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/",
    default=default_profiles_dir,
    type=click.Path(exists=False),
)

project_dir = click.option(
    "--project-dir",
    envvar=None,
    help="Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.",
    default=default_project_dir(),
    default=default_project_dir,
    type=click.Path(exists=True),
)

@@ -240,10 +271,11 @@ record_timing_info = click.option(
)

resource_type = click.option(
    "--resource-types",
    "--resource-type",
    envvar=None,
    help="TODO: No current help text",
    type=click.Choice(
    type=ChoiceTuple(
        [
            "metric",
            "source",
@@ -258,9 +290,27 @@ resource_type = click.option(
        ],
        case_sensitive=False,
    ),
    default="default",
    cls=MultiOption,
    default=(),
)

model_decls = ("-m", "--models", "--model")
select_decls = ("-s", "--select")
select_attrs = {
    "envvar": None,
    "help": "Specify the nodes to include.",
    "cls": MultiOption,
    "type": tuple,
}

# `--select` and `--models` are analogous for most commands except `dbt list` for legacy reasons.
# Most CLI arguments should use the combined `select` option that aliases `--models` to `--select`.
# However, if you need to split out these separators (like `dbt ls`), use the `models` and `raw_select` options instead.
# See https://github.com/dbt-labs/dbt-core/pull/6774#issuecomment-1408476095 for more info.
models = click.option(*model_decls, **select_attrs)
raw_select = click.option(*select_decls, **select_attrs)
select = click.option(*select_decls, *model_decls, **select_attrs)

selector = click.option(
    "--selector", envvar=None, help="The selector name to use, as defined in selectors.yml"
)
@@ -269,6 +319,19 @@ show = click.option(
    "--show", envvar=None, help="Show a sample of the loaded data in the terminal", is_flag=True
)

# TODO: The env var is a correction!
# The original env var was `DBT_TEST_SINGLE_THREADED`.
# This broke the existing naming convention.
# This will need to be communicated as a change to the community!
#
# N.B. This flag is only used for testing, hence it's hidden from help text.
single_threaded = click.option(
    "--single-threaded/--no-single-threaded",
    envvar="DBT_SINGLE_THREADED",
    default=False,
    hidden=True,
)

skip_profile_setup = click.option(
    "--skip-profile-setup", "-s", envvar=None, help="Skip interactive profile setup.", is_flag=True
)
@@ -283,10 +346,10 @@ state = click.option(
    help="If set, use the given directory as the source for json files to compare with this project.",
    type=click.Path(
        dir_okay=True,
        exists=True,
        file_okay=False,
        readable=True,
        resolve_path=True,
        path_type=Path,
    ),
)

@@ -319,14 +382,21 @@ threads = click.option(
    "--threads",
    envvar=None,
    help="Specify number of threads to use while executing models. Overrides settings in profiles.yml.",
    default=1,
    default=None,
    type=click.INT,
)

use_colors = click.option(
    "--use-colors/--no-use-colors",
    envvar="DBT_USE_COLORS",
    help="Output is colorized by default and may also be set in a profile or at the command line.",
    help="Specify whether log output is colorized.",
    default=True,
)

use_colors_file = click.option(
    "--use-colors-file/--no-use-colors-file",
    envvar="DBT_USE_COLORS_FILE",
    help="Specify whether log file output is colorized overriding --use-colors/--no-use-colors.",
    default=True,
)

@@ -341,12 +411,26 @@ vars = click.option(
    envvar=None,
    help="Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. '{my_variable: my_value}'",
    type=YAML(),
    default="{}",
)


# TODO: when legacy flags are deprecated use
# click.version_option instead of a callback
def _version_callback(ctx, _param, value):
    if not value or ctx.resilient_parsing:
        return
    click.echo(get_version_information())
    ctx.exit()


version = click.option(
    "--version",
    callback=_version_callback,
    envvar=None,
    expose_value=False,
    help="Show version information",
    is_eager=True,
    is_flag=True,
)

@@ -362,13 +446,13 @@ warn_error = click.option(
    envvar="DBT_WARN_ERROR",
    help="If dbt would normally warn, instead raise an exception. Examples include --select that selects nothing, deprecations, configurations with no associated models, invalid test configurations, and missing sources/refs in tests.",
    default=None,
    flag_value=True,
    is_flag=True,
)

warn_error_options = click.option(
    "--warn-error-options",
    envvar="DBT_WARN_ERROR_OPTIONS",
    default=None,
    default="{}",
    help="""If dbt would normally warn, instead raise an exception based on include/exclude configuration. Examples include --select that selects nothing, deprecations, configurations with no associated models, invalid test configurations,
and missing sources/refs in tests. This argument should be a YAML string, with keys 'include' or 'exclude'. eg. '{"include": "all", "exclude": ["NoNodesForSelectionCriteria"]}'""",
    type=WarnErrorOptionsType(),
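One subtle change in this hunk is worth spelling out: `default=default_profiles_dir()` called the resolver once at import time, while `default=default_profiles_dir` hands click the callable, which click invokes lazily each time the parameter is actually processed. A small sketch of the difference (option names reused from above purely for illustration):

import click
from dbt.cli.resolvers import default_profiles_dir

# Eager: evaluated exactly once, when this module is imported.
eager = click.option("--profiles-dir", default=default_profiles_dir())

# Lazy: click calls the function when the command runs, so a profiles.yml
# created in the cwd after import is still picked up.
lazy = click.option("--profiles-dir", default=default_profiles_dir)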
core/dbt/cli/requires.py (new file, 180 lines)
@@ -0,0 +1,180 @@
from dbt.version import installed as installed_version
from dbt.adapters.factory import adapter_management, register_adapter
from dbt.flags import set_flags, get_flag_dict
from dbt.cli.flags import Flags
from dbt.config import RuntimeConfig
from dbt.config.runtime import load_project, load_profile, UnsetProfile
from dbt.events.functions import setup_event_logger, fire_event, LOG_VERSION
from dbt.events.types import MainReportVersion, MainReportArgs, MainTrackingUserState
from dbt.exceptions import DbtProjectError
from dbt.parser.manifest import ManifestLoader, write_manifest
from dbt.profiler import profiler
from dbt.tracking import active_user, initialize_from_flags, track_run
from dbt.utils import cast_dict_to_dict_of_strings

from click import Context
from functools import update_wrapper


def preflight(func):
    def wrapper(*args, **kwargs):
        ctx = args[0]
        assert isinstance(ctx, Context)
        ctx.obj = ctx.obj or {}

        # Flags
        flags = Flags(ctx)
        ctx.obj["flags"] = flags
        set_flags(flags)

        # Tracking
        initialize_from_flags(flags.SEND_ANONYMOUS_USAGE_STATS, flags.PROFILES_DIR)
        ctx.with_resource(track_run(run_command=flags.WHICH))

        # Logging
        # N.B. Legacy logger is not supported
        setup_event_logger(flags)

        # Now that we have our logger, fire away!
        fire_event(MainReportVersion(version=str(installed_version), log_version=LOG_VERSION))
        flags_dict_str = cast_dict_to_dict_of_strings(get_flag_dict())
        fire_event(MainReportArgs(args=flags_dict_str))

        if active_user is not None:  # mypy appeasement, always true
            fire_event(MainTrackingUserState(user_state=active_user.state()))

        # Profiling
        if flags.RECORD_TIMING_INFO:
            ctx.with_resource(profiler(enable=True, outfile=flags.RECORD_TIMING_INFO))

        # Adapter management
        ctx.with_resource(adapter_management())

        return func(*args, **kwargs)

    return update_wrapper(wrapper, func)


# TODO: UnsetProfile is necessary for deps and clean to load a project.
# This decorator and its usage can be removed once https://github.com/dbt-labs/dbt-core/issues/6257 is closed.
def unset_profile(func):
    def wrapper(*args, **kwargs):
        ctx = args[0]
        assert isinstance(ctx, Context)

        if ctx.obj.get("profile") is None:
            profile = UnsetProfile()
            ctx.obj["profile"] = profile

        return func(*args, **kwargs)

    return update_wrapper(wrapper, func)


def profile(func):
    def wrapper(*args, **kwargs):
        ctx = args[0]
        assert isinstance(ctx, Context)

        if ctx.obj.get("profile") is None:
            flags = ctx.obj["flags"]
            # TODO: Generalize safe access to flags.THREADS:
            # https://github.com/dbt-labs/dbt-core/issues/6259
            threads = getattr(flags, "THREADS", None)
            profile = load_profile(
                flags.PROJECT_DIR, flags.VARS, flags.PROFILE, flags.TARGET, threads
            )
            ctx.obj["profile"] = profile

        return func(*args, **kwargs)

    return update_wrapper(wrapper, func)


def project(func):
    def wrapper(*args, **kwargs):
        ctx = args[0]
        assert isinstance(ctx, Context)

        if ctx.obj.get("project") is None:
            # TODO: Decouple target from profile, and remove the need for profile here:
            # https://github.com/dbt-labs/dbt-core/issues/6257
            if not ctx.obj.get("profile"):
                raise DbtProjectError("profile required for project")

            flags = ctx.obj["flags"]
            project = load_project(
                flags.PROJECT_DIR, flags.VERSION_CHECK, ctx.obj["profile"], flags.VARS
            )
            ctx.obj["project"] = project

        return func(*args, **kwargs)

    return update_wrapper(wrapper, func)


def runtime_config(func):
    """A decorator used by click command functions for generating a runtime
    config given a profile and project.
    """

    def wrapper(*args, **kwargs):
        ctx = args[0]
        assert isinstance(ctx, Context)

        req_strs = ["profile", "project"]
        reqs = [ctx.obj.get(req_str) for req_str in req_strs]

        if None in reqs:
            raise DbtProjectError("profile and project required for runtime_config")

        ctx.obj["runtime_config"] = RuntimeConfig.from_parts(
            ctx.obj["project"],
            ctx.obj["profile"],
            ctx.obj["flags"],
        )

        return func(*args, **kwargs)

    return update_wrapper(wrapper, func)


def manifest(*args0, write=True, write_perf_info=False):
    """A decorator used by click command functions for generating a manifest
    given a profile, project, and runtime config. This also registers the adapter
    from the runtime config and conditionally writes the manifest to disc.
    """

    def outer_wrapper(func):
        def wrapper(*args, **kwargs):
            ctx = args[0]
            assert isinstance(ctx, Context)

            req_strs = ["profile", "project", "runtime_config"]
            reqs = [ctx.obj.get(dep) for dep in req_strs]

            if None in reqs:
                raise DbtProjectError("profile, project, and runtime_config required for manifest")

            runtime_config = ctx.obj["runtime_config"]
            register_adapter(runtime_config)

            # a manifest has already been set on the context, so don't overwrite it
            if ctx.obj.get("manifest") is None:
                manifest = ManifestLoader.get_full_manifest(
                    runtime_config, write_perf_info=write_perf_info
                )

                ctx.obj["manifest"] = manifest
                if write and ctx.obj["flags"].write_json:
                    write_manifest(manifest, ctx.obj["runtime_config"].target_path)

            return func(*args, **kwargs)

        return update_wrapper(wrapper, func)

    # if there are no args, the decorator was used without params @decorator
    # otherwise, the decorator was called with params @decorator(arg)
    if len(args0) == 0:
        return outer_wrapper
    return outer_wrapper(args0[0])
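Because `manifest` takes optional arguments, it supports both decorator spellings, as its closing comment explains. A sketch of the two usages (command bodies are illustrative, and this assumes the module is imported as `requires`, matching the commented-out `@requires.manifest` usage in main.py earlier in this diff):

from dbt.cli import requires

@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest  # bare form: defaults to write=True, write_perf_info=False
def compile(ctx, **kwargs):
    ...

@requires.manifest(write=False)  # parameterized form: load but skip writing manifest.json
def parse_only(ctx, **kwargs):
    ...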
@@ -1,11 +1,31 @@
from pathlib import Path
from dbt.config.project import PartialProject
from dbt.exceptions import DbtProjectError


def default_project_dir():
def default_project_dir() -> Path:
    paths = list(Path.cwd().parents)
    paths.insert(0, Path.cwd())
    return next((x for x in paths if (x / "dbt_project.yml").exists()), Path.cwd())


def default_profiles_dir():
def default_profiles_dir() -> Path:
    return Path.cwd() if (Path.cwd() / "profiles.yml").exists() else Path.home() / ".dbt"


def default_log_path(project_dir: Path, verify_version: bool = False) -> Path:
    """If available, derive a default log path from dbt_project.yml. Otherwise, default to "logs".
    Known limitations:
    1. Using PartialProject here, so no jinja rendering of log-path.
    2. Programmatic invocations of the cli via dbtRunner may pass a Project object directly,
       which is not being taken into consideration here to extract a log-path.
    """
    default_log_path = Path("logs")
    try:
        partial = PartialProject.from_project_root(str(project_dir), verify_version=verify_version)
        partial_log_path = partial.project_dict.get("log-path") or default_log_path
        default_log_path = Path(project_dir) / partial_log_path
    except DbtProjectError:
        pass

    return default_log_path
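A small usage sketch of the resolver added above (output is illustrative): when no readable dbt_project.yml exists at the given root, the DbtProjectError is swallowed and the relative fallback is returned; otherwise the project's unrendered log-path is joined onto the project directory.

from pathlib import Path
from dbt.cli.resolvers import default_log_path

# Either <project_dir>/<log-path from dbt_project.yml> or the fallback Path("logs")
print(default_log_path(Path.cwd()))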
@@ -40,7 +40,7 @@ from dbt.exceptions import (
    UndefinedCompilationError,
    UndefinedMacroError,
)
from dbt import flags
from dbt.flags import get_flags
from dbt.node_types import ModelLanguage


@@ -99,8 +99,9 @@ class MacroFuzzEnvironment(jinja2.sandbox.SandboxedEnvironment):
        If the value is 'write', also write the files to disk.
        WARNING: This can write a ton of data if you aren't careful.
        """
        if filename == "<template>" and flags.MACRO_DEBUGGING:
            write = flags.MACRO_DEBUGGING == "write"
        macro_debugging = get_flags().MACRO_DEBUGGING
        if filename == "<template>" and macro_debugging:
            write = macro_debugging == "write"
            filename = _linecache_inject(source, write)

        return super()._compile(source, filename)  # type: ignore
@@ -1,30 +1,32 @@
import errno
import functools
import fnmatch
import functools
import json
import os
import os.path
import re
import shutil
import stat
import subprocess
import sys
import tarfile
import requests
import stat
from typing import Type, NoReturn, List, Optional, Dict, Any, Tuple, Callable, Union
from pathspec import PathSpec  # type: ignore
from pathlib import Path
from typing import Any, Callable, Dict, List, NoReturn, Optional, Tuple, Type, Union

import dbt.exceptions
import requests
from dbt.events.functions import fire_event
from dbt.events.types import (
    SystemErrorRetrievingModTime,
    SystemCouldNotWrite,
    SystemErrorRetrievingModTime,
    SystemExecutingCmd,
    SystemStdOut,
    SystemStdErr,
    SystemReportReturnCode,
)
import dbt.exceptions
from dbt.exceptions import DbtInternalError
from dbt.utils import _connection_exception_retry as connection_exception_retry
from pathspec import PathSpec  # type: ignore

if sys.platform == "win32":
    from ctypes import WinDLL, c_bool
@@ -106,12 +108,18 @@ def load_file_contents(path: str, strip: bool = True) -> str:
    return to_return


def make_directory(path: str) -> None:
@functools.singledispatch
def make_directory(path=None) -> None:
    """
    Make a directory and any intermediate directories that don't already
    exist. This function handles the case where two threads try to create
    a directory at once.
    """
    raise DbtInternalError(f"Can not create directory from {type(path)} ")


@make_directory.register
def _(path: str) -> None:
    path = convert_path(path)
    if not os.path.exists(path):
        # concurrent writes that try to create the same dir can fail
@@ -125,6 +133,11 @@ def make_directory(path: str) -> None:
            raise e


@make_directory.register
def _(path: Path) -> None:
    path.mkdir(parents=True, exist_ok=True)


def make_file(path: str, contents: str = "", overwrite: bool = False) -> bool:
    """
    Make a file at `path` assuming that the directory it resides in already
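With the singledispatch registration above, make_directory now accepts either a str or a pathlib.Path, dispatching on the argument's type; any other type falls through to the base implementation and raises DbtInternalError. A small sketch (directory names are illustrative):

from pathlib import Path
from dbt.clients.system import make_directory

make_directory("target/compiled")       # str overload: the existing os-based implementation
make_directory(Path("target") / "run")  # Path overload: Path.mkdir(parents=True, exist_ok=True)
# make_directory(42)                    # unregistered type: raises DbtInternalError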
@@ -1,12 +1,13 @@
import os
from collections import defaultdict
from typing import List, Dict, Any, Tuple, Optional

import argparse
import networkx as nx  # type: ignore
import os
import pickle
import sqlparse

from dbt import flags
from collections import defaultdict
from typing import List, Dict, Any, Tuple, Optional

from dbt.flags import get_flags
from dbt.adapters.factory import get_adapter
from dbt.clients import jinja
from dbt.clients.system import make_directory
@@ -32,6 +33,7 @@ from dbt.events.contextvars import get_node_info
from dbt.node_types import NodeType, ModelLanguage
from dbt.events.format import pluralize
import dbt.tracking
import dbt.task.list as list_task

graph_file_name = "graph.gpickle"

@@ -49,6 +51,7 @@ def print_compile_stats(stats):
        NodeType.Exposure: "exposure",
        NodeType.Metric: "metric",
        NodeType.Entity: "entity",
        NodeType.Group: "group",
    }

    results = {k: 0 for k in names.keys()}
@@ -88,15 +91,18 @@ def _generate_stats(manifest: Manifest):
        stats[entity.resource_type] += 1
    for macro in manifest.macros.values():
        stats[macro.resource_type] += 1
    for group in manifest.groups.values():
        stats[group.resource_type] += 1
    return stats


def _add_prepended_cte(prepended_ctes, new_cte):
    for cte in prepended_ctes:
        if cte.id == new_cte.id:
        if cte.id == new_cte.id and new_cte.sql:
            cte.sql = new_cte.sql
            return
    prepended_ctes.append(new_cte)
    if new_cte.sql:
        prepended_ctes.append(new_cte)


def _extend_prepended_ctes(prepended_ctes, new_prepended_ctes):
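A small illustration of the dedup behavior _add_prepended_cte now enforces. This uses a hypothetical stand-in for dbt's InjectedCTE contract, which per the comments later in this diff carries a unique_id and sql (with sql starting out as None):

from dataclasses import dataclass
from typing import Optional

@dataclass
class FakeCTE:  # stand-in for InjectedCTE (id + sql), for illustration only
    id: str
    sql: Optional[str]

ctes = [FakeCTE(id="model.proj.a", sql=None)]
# Re-adding an id that already exists, now with rendered SQL, updates it in place...
_add_prepended_cte(ctes, FakeCTE(id="model.proj.a", sql="select 1"))
# ...while a CTE whose sql is still None is no longer appended at all.
_add_prepended_cte(ctes, FakeCTE(id="model.proj.b", sql=None))
assert len(ctes) == 1 and ctes[0].sql == "select 1"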
@@ -260,16 +266,18 @@ class Compiler:
        inserting CTEs into the SQL.
        """
        if model.compiled_code is None:
            raise DbtRuntimeError("Cannot inject ctes into an unparsed node", model)
            raise DbtRuntimeError("Cannot inject ctes into an uncompiled node", model)

        # extra_ctes_injected flag says that we've already recursively injected the ctes
        if model.extra_ctes_injected:
            return (model, model.extra_ctes)

        # Just to make it plain that nothing is actually injected for this case
        if not model.extra_ctes:
        if len(model.extra_ctes) == 0:
            # SeedNodes don't have compilation attributes
            if not isinstance(model, SeedNode):
                model.extra_ctes_injected = True
                manifest.update_node(model)
            return (model, model.extra_ctes)
            return (model, [])

        # This stores the ctes which will all be recursively
        # gathered and then "injected" into the model.
@@ -278,7 +286,8 @@ class Compiler:
        # extra_ctes are added to the model by
        # RuntimeRefResolver.create_relation, which adds an
        # extra_cte for every model relation which is an
        # ephemeral model.
        # ephemeral model. InjectedCTEs have a unique_id and sql.
        # extra_ctes start out with sql set to None, and the sql is set in this loop.
        for cte in model.extra_ctes:
            if cte.id not in manifest.nodes:
                raise DbtInternalError(
@@ -291,23 +300,23 @@ class Compiler:
            if not cte_model.is_ephemeral_model:
                raise DbtInternalError(f"{cte.id} is not ephemeral")

            # This model has already been compiled, so it's been
            # through here before
            if getattr(cte_model, "compiled", False):
            # This model has already been compiled and extra_ctes_injected, so it's been
            # through here before. We already checked above for extra_ctes_injected, but
            # checking again because updates maybe have happened in another thread.
            if cte_model.compiled is True and cte_model.extra_ctes_injected is True:
                new_prepended_ctes = cte_model.extra_ctes

            # if the cte_model isn't compiled, i.e. first time here
            else:
                # This is an ephemeral parsed model that we can compile.
                # Compile and update the node
                cte_model = self._compile_node(cte_model, manifest, extra_context)
                # recursively call this method
                # Render the raw_code and set compiled to True
                cte_model = self._compile_code(cte_model, manifest, extra_context)
                # recursively call this method, sets extra_ctes_injected to True
                cte_model, new_prepended_ctes = self._recursively_prepend_ctes(
                    cte_model, manifest, extra_context
                )
                # Save compiled SQL file and sync manifest
                # Write compiled SQL file
                self._write_node(cte_model)
                manifest.sync_update_node(cte_model)

            _extend_prepended_ctes(prepended_ctes, new_prepended_ctes)

@@ -321,20 +330,21 @@ class Compiler:
            model.compiled_code,
            prepended_ctes,
        )
        model._pre_injected_sql = model.compiled_code
        model.compiled_code = injected_sql
        model.extra_ctes_injected = True
        model.extra_ctes = prepended_ctes
        model.validate(model.to_dict(omit_none=True))
        manifest.update_node(model)
        # Check again before updating for multi-threading
        if not model.extra_ctes_injected:
            model._pre_injected_sql = model.compiled_code
            model.compiled_code = injected_sql
            model.extra_ctes = prepended_ctes
            model.extra_ctes_injected = True

        return model, prepended_ctes
        # if model.extra_ctes is not set to prepended ctes, something went wrong
        return model, model.extra_ctes

    # Sets compiled fields in the ManifestSQLNode passed in,
    # Sets compiled_code and compiled flag in the ManifestSQLNode passed in,
    # creates a "context" dictionary for jinja rendering,
    # and then renders the "compiled_code" using the node, the
    # raw_code and the context.
    def _compile_node(
    def _compile_code(
        self,
        node: ManifestSQLNode,
        manifest: Manifest,
@@ -343,24 +353,7 @@ class Compiler:
        if extra_context is None:
            extra_context = {}

        data = node.to_dict(omit_none=True)
        data.update(
            {
                "compiled": False,
                "compiled_code": None,
                "extra_ctes_injected": False,
                "extra_ctes": [],
            }
        )

        if node.language == ModelLanguage.python:
            # TODO could we also 'minify' this code at all? just aesthetic, not functional

            # quoating seems like something very specific to sql so far
            # for all python implementations we are seeing there's no quating.
            # TODO try to find better way to do this, given that
            original_quoting = self.config.quoting
            self.config.quoting = {key: False for key in original_quoting.keys()}
            context = self._create_node_context(node, manifest, extra_context)

            postfix = jinja.get_rendered(
@@ -370,8 +363,6 @@ class Compiler:
            )
            # we should NOT jinja render the python model's 'raw code'
            node.compiled_code = f"{node.raw_code}\n\n{postfix}"
            # restore quoting settings in the end since context is lazy evaluated
            self.config.quoting = original_quoting

        else:
            context = self._create_node_context(node, manifest, extra_context)
@@ -383,11 +374,24 @@ class Compiler:

        node.compiled = True

        # relation_name is set at parse time, except for tests without store_failures,
        # but cli param can turn on store_failures, so we set here.
        if (
            node.resource_type == NodeType.Test
            and node.relation_name is None
            and node.is_relational
        ):
            adapter = get_adapter(self.config)
            relation_cls = adapter.Relation
            relation_name = str(relation_cls.create_from(self.config, node))
            node.relation_name = relation_name

        return node

    def write_graph_file(self, linker: Linker, manifest: Manifest):
        filename = graph_file_name
        graph_path = os.path.join(self.config.target_path, filename)
        flags = get_flags()
        if flags.WRITE_JSON:
            linker.write_graph(graph_path, manifest)

@@ -489,7 +493,13 @@ class Compiler:

        if write:
            self.write_graph_file(linker, manifest)
        print_compile_stats(stats)

        # Do not print these for ListTask's
        if not (
            self.config.args.__class__ == argparse.Namespace
            and self.config.args.cls == list_task.ListTask
        ):
            print_compile_stats(stats)

        return Graph(linker.graph)

@@ -517,11 +527,11 @@ class Compiler:
    ) -> ManifestSQLNode:
        """This is the main entry point into this code. It's called by
        CompileRunner.compile, GenericRPCRunner.compile, and
        RunTask.get_hook_sql. It calls '_compile_node' to convert
        the node into a compiled node, and then calls the
        RunTask.get_hook_sql. It calls '_compile_code' to render
        the node's raw_code into compiled_code, and then calls the
        recursive method to "prepend" the ctes.
        """
        node = self._compile_node(node, manifest, extra_context)
        node = self._compile_code(node, manifest, extra_context)

        node, _ = self._recursively_prepend_ctes(node, manifest, extra_context)
        if write:
@@ -1,4 +1,4 @@
# all these are just exports, they need "noqa" so flake8 will not complain.
from .profile import Profile, read_user_config  # noqa
from .project import Project, IsFQNResource  # noqa
from .runtime import RuntimeConfig, UnsetProfileConfig  # noqa
from .project import Project, IsFQNResource, PartialProject  # noqa
from .runtime import RuntimeConfig  # noqa
@@ -4,7 +4,7 @@ import os

from dbt.dataclass_schema import ValidationError

from dbt import flags
from dbt.flags import get_flags
from dbt.clients.system import load_file_contents
from dbt.clients.yaml_helper import load_yaml_text
from dbt.contracts.connection import Credentials, HasCredentials
@@ -32,22 +32,6 @@ dbt encountered an error while trying to read your profiles.yml file.
"""


NO_SUPPLIED_PROFILE_ERROR = """\
dbt cannot run because no profile was specified for this dbt project.
To specify a profile for this project, add a line like the this to
your dbt_project.yml file:

profile: [profile name]

Here, [profile name] should be replaced with a profile name
defined in your profiles.yml file. You can find profiles.yml here:

{profiles_file}/profiles.yml
""".format(
    profiles_file=flags.DEFAULT_PROFILES_DIR
)


def read_profile(profiles_dir: str) -> Dict[str, Any]:
    path = os.path.join(profiles_dir, "profiles.yml")

@@ -197,10 +181,33 @@ class Profile(HasCredentials):
        args_profile_name: Optional[str],
        project_profile_name: Optional[str] = None,
    ) -> str:
        # TODO: Duplicating this method as direct copy of the implementation in dbt.cli.resolvers
        # dbt.cli.resolvers implementation can't be used because it causes a circular dependency.
        # This should be removed and use a safe default access on the Flags module when
        # https://github.com/dbt-labs/dbt-core/issues/6259 is closed.
        def default_profiles_dir():
            from pathlib import Path

            return Path.cwd() if (Path.cwd() / "profiles.yml").exists() else Path.home() / ".dbt"

        profile_name = project_profile_name
        if args_profile_name is not None:
            profile_name = args_profile_name
        if profile_name is None:
            NO_SUPPLIED_PROFILE_ERROR = """\
dbt cannot run because no profile was specified for this dbt project.
To specify a profile for this project, add a line like the this to
your dbt_project.yml file:

profile: [profile name]

Here, [profile name] should be replaced with a profile name
defined in your profiles.yml file. You can find profiles.yml here:

{profiles_file}/profiles.yml
""".format(
                profiles_file=default_profiles_dir()
            )
            raise DbtProjectError(NO_SUPPLIED_PROFILE_ERROR)
        return profile_name

@@ -401,11 +408,13 @@ class Profile(HasCredentials):
        )

    @classmethod
    def render_from_args(
    def render(
        cls,
        args: Any,
        renderer: ProfileRenderer,
        project_profile_name: Optional[str],
        profile_name_override: Optional[str] = None,
        target_override: Optional[str] = None,
        threads_override: Optional[int] = None,
    ) -> "Profile":
        """Given the raw profiles as read from disk and the name of the desired
        profile if specified, return the profile component of the runtime
@@ -421,10 +430,9 @@ class Profile(HasCredentials):
            target could not be found.
        :returns Profile: The new Profile object.
        """
        threads_override = getattr(args, "threads", None)
        target_override = getattr(args, "target", None)
        flags = get_flags()
        raw_profiles = read_profile(flags.PROFILES_DIR)
        profile_name = cls.pick_profile_name(getattr(args, "profile", None), project_profile_name)
        profile_name = cls.pick_profile_name(profile_name_override, project_profile_name)
        return cls.from_raw_profiles(
            raw_profiles=raw_profiles,
            profile_name=profile_name,
@@ -12,10 +12,10 @@ from typing import (
)
from typing_extensions import Protocol, runtime_checkable

import hashlib
import os

from dbt import flags, deprecations
from dbt.flags import get_flags
from dbt import deprecations
from dbt.clients.system import path_exists, resolve_path_from_base, load_file_contents
from dbt.clients.yaml_helper import load_yaml_text
from dbt.contracts.connection import QueryComment
@@ -30,16 +30,16 @@ from dbt.graph import SelectionSpec
from dbt.helper_types import NoValue
from dbt.semver import VersionSpecifier, versions_compatible
from dbt.version import get_installed_version
from dbt.utils import MultiDict
from dbt.utils import MultiDict, md5
from dbt.node_types import NodeType
from dbt.config.selectors import SelectorDict
from dbt.contracts.project import (
    Project as ProjectContract,
    SemverString,
)
from dbt.contracts.project import PackageConfig
from dbt.contracts.project import PackageConfig, ProjectPackageMetadata
from dbt.dataclass_schema import ValidationError
from .renderer import DbtProjectYamlRenderer
from .renderer import DbtProjectYamlRenderer, PackageRenderer
from .selectors import (
    selector_config_from_data,
    selector_data_from_root,
@@ -75,6 +75,11 @@ Validator Error:
{error}
"""

MISSING_DBT_PROJECT_ERROR = """\
No dbt_project.yml found at expected path {path}
Verify that each entry within packages.yml (and their transitive dependencies) contains a file named dbt_project.yml
"""


@runtime_checkable
class IsFQNResource(Protocol):
@@ -132,11 +137,10 @@ def _all_source_paths(
    analysis_paths: List[str],
    macro_paths: List[str],
) -> List[str]:
    # We need to turn a list of lists into just a list, then convert to a set to
    # get only unique elements, then back to a list
    return list(
        set(list(chain(model_paths, seed_paths, snapshot_paths, analysis_paths, macro_paths)))
    )
    paths = chain(model_paths, seed_paths, snapshot_paths, analysis_paths, macro_paths)
    # Strip trailing slashes since the path is the same even though the name is not
    stripped_paths = map(lambda s: s.rstrip("/"), paths)
    return list(set(stripped_paths))


T = TypeVar("T")
@@ -156,16 +160,14 @@ def value_or(value: Optional[T], default: T) -> T:
    return value


def _raw_project_from(project_root: str) -> Dict[str, Any]:
def load_raw_project(project_root: str) -> Dict[str, Any]:

    project_root = os.path.normpath(project_root)
    project_yaml_filepath = os.path.join(project_root, "dbt_project.yml")

    # get the project.yml contents
    if not path_exists(project_yaml_filepath):
        raise DbtProjectError(
            "no dbt_project.yml found at expected path {}".format(project_yaml_filepath)
        )
        raise DbtProjectError(MISSING_DBT_PROJECT_ERROR.format(path=project_yaml_filepath))

    project_dict = _load_yaml(project_yaml_filepath)

@@ -289,6 +291,13 @@ class PartialProject(RenderComponents):
            exc.path = os.path.join(self.project_root, "dbt_project.yml")
            raise

    def render_package_metadata(self, renderer: PackageRenderer) -> ProjectPackageMetadata:
        packages_data = renderer.render_data(self.packages_dict)
        packages_config = package_config_from_data(packages_data)
        if not self.project_name:
            raise DbtProjectError("Package dbt_project.yml must have a name!")
        return ProjectPackageMetadata(self.project_name, packages_config.packages)

    def check_config_path(self, project_dict, deprecated_path, exp_path):
        if deprecated_path in project_dict:
            if exp_path in project_dict:
@@ -363,9 +372,13 @@ class PartialProject(RenderComponents):

        docs_paths: List[str] = value_or(cfg.docs_paths, all_source_paths)
        asset_paths: List[str] = value_or(cfg.asset_paths, [])
        target_path: str = flag_or(flags.TARGET_PATH, cfg.target_path, "target")
        flags = get_flags()

        flag_target_path = str(flags.TARGET_PATH) if flags.TARGET_PATH else None
        target_path: str = flag_or(flag_target_path, cfg.target_path, "target")
        log_path: str = str(flags.LOG_PATH)

        clean_targets: List[str] = value_or(cfg.clean_targets, [target_path])
        log_path: str = flag_or(flags.LOG_PATH, cfg.log_path, "logs")
        packages_install_path: str = value_or(cfg.packages_install_path, "dbt_packages")
        # in the default case we'll populate this once we know the adapter type
        # It would be nice to just pass along a Quoting here, but that would
@@ -488,7 +501,7 @@ class PartialProject(RenderComponents):
        cls, project_root: str, *, verify_version: bool = False
    ) -> "PartialProject":
        project_root = os.path.normpath(project_root)
        project_dict = _raw_project_from(project_root)
        project_dict = load_raw_project(project_root)
        config_version = project_dict.get("config-version", 1)
        if config_version != 2:
            raise DbtProjectError(
@@ -528,7 +541,7 @@ class VarProvider:
@dataclass
class Project:
    project_name: str
    version: Union[SemverString, float]
    version: Optional[Union[SemverString, float]]
    project_root: str
    profile_name: Optional[str]
    model_paths: List[str]
@@ -664,11 +677,11 @@ class Project:
        *,
        verify_version: bool = False,
    ) -> "Project":
        partial = cls.partial_load(project_root, verify_version=verify_version)
        partial = PartialProject.from_project_root(project_root, verify_version=verify_version)
        return partial.render(renderer)

    def hashed_name(self):
        return hashlib.md5(self.project_name.encode("utf-8")).hexdigest()
        return md5(self.project_name)

    def get_selector(self, name: str) -> Union[SelectionSpec, bool]:
        if name not in self.selectors:
@@ -107,7 +107,7 @@ class DbtProjectYamlRenderer(BaseRenderer):
        if cli_vars is None:
            cli_vars = {}
        if profile:
            self.ctx_obj = TargetContext(profile, cli_vars)
            self.ctx_obj = TargetContext(profile.to_target_dict(), cli_vars)
        else:
            self.ctx_obj = BaseContext(cli_vars)  # type:ignore
        context = self.ctx_obj.to_dict()
@@ -1,7 +1,7 @@
import itertools
import os
from copy import deepcopy
from dataclasses import dataclass, field
from dataclasses import dataclass
from pathlib import Path
from typing import (
    Any,
@@ -13,17 +13,18 @@ from typing import (
    Optional,
    Tuple,
    Type,
    Union,
)

from dbt import flags
from dbt.flags import get_flags
from dbt.adapters.factory import get_include_paths, get_relation_class_by_name
from dbt.config.profile import read_user_config
from dbt.contracts.connection import AdapterRequiredConfig, Credentials
from dbt.config.project import load_raw_project
from dbt.contracts.connection import AdapterRequiredConfig, Credentials, HasCredentials
from dbt.contracts.graph.manifest import ManifestMetadata
from dbt.contracts.project import Configuration, UserConfig
from dbt.contracts.relation import ComponentName
from dbt.dataclass_schema import ValidationError
from dbt.events.functions import warn_or_error
from dbt.events.types import UnusedResourceConfigPath
from dbt.exceptions import (
    ConfigContractBrokenError,
    DbtProjectError,
@@ -31,14 +32,46 @@ from dbt.exceptions import (
    DbtRuntimeError,
    UninstalledPackagesFoundError,
)
from dbt.events.functions import warn_or_error
from dbt.events.types import UnusedResourceConfigPath
from dbt.helper_types import DictDefaultEmptyStr, FQNPath, PathSet

from .profile import Profile
from .project import Project, PartialProject
from .project import Project
from .renderer import DbtProjectYamlRenderer, ProfileRenderer
from .utils import parse_cli_vars


def load_project(
    project_root: str,
    version_check: bool,
    profile: HasCredentials,
    cli_vars: Optional[Dict[str, Any]] = None,
) -> Project:
    # get the project with all of the provided information
    project_renderer = DbtProjectYamlRenderer(profile, cli_vars)
    project = Project.from_project_root(
        project_root, project_renderer, verify_version=version_check
    )

    # Save env_vars encountered in rendering for partial parsing
    project.project_env_vars = project_renderer.ctx_obj.env_vars
    return project


def load_profile(
    project_root: str,
    cli_vars: Dict[str, Any],
    profile_name_override: Optional[str] = None,
    target_override: Optional[str] = None,
    threads_override: Optional[int] = None,
) -> Profile:
    raw_project = load_raw_project(project_root)
    raw_profile_name = raw_project.get("profile")
    profile_renderer = ProfileRenderer(cli_vars)
    profile_name = profile_renderer.render_value(raw_profile_name)
    profile = Profile.render(
        profile_renderer, profile_name, profile_name_override, target_override, threads_override
    )
    # Save env_vars encountered in rendering for partial parsing
    profile.profile_env_vars = profile_renderer.ctx_obj.env_vars
    return profile


def _project_quoting_dict(proj: Project, profile: Profile) -> Dict[ComponentName, bool]:
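Taken together, the two loaders added above make the profile -> project -> runtime-config sequence explicit. A minimal sketch of how a caller might compose them, mirroring what the requires.py decorators earlier in this diff do (the paths and overrides here are illustrative):

from dbt.config.runtime import load_profile, load_project, RuntimeConfig

project_root = "."  # illustrative: a directory containing dbt_project.yml
cli_vars = {}       # parsed --vars; already a dict in this version

profile = load_profile(project_root, cli_vars, target_override=None)
project = load_project(project_root, version_check=True, profile=profile, cli_vars=cli_vars)
# from_parts additionally expects the parsed args/flags object, as in requires.runtime_config:
# config = RuntimeConfig.from_parts(project, profile, args)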
@@ -62,6 +95,21 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
||||
def __post_init__(self):
|
||||
self.validate()
|
||||
|
||||
@classmethod
|
||||
def get_profile(
|
||||
cls,
|
||||
project_root: str,
|
||||
cli_vars: Dict[str, Any],
|
||||
args: Any,
|
||||
) -> Profile:
|
||||
return load_profile(
|
||||
project_root,
|
||||
cli_vars,
|
||||
args.profile,
|
||||
args.target,
|
||||
args.threads,
|
||||
)
|
||||
|
||||
# Called by 'new_project' and 'from_args'
|
||||
@classmethod
|
||||
def from_parts(
|
||||
@@ -84,7 +132,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
||||
.replace_dict(_project_quoting_dict(project, profile))
|
||||
).to_dict(omit_none=True)
|
||||
|
||||
cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))
|
||||
cli_vars: Dict[str, Any] = getattr(args, "vars", {})
|
||||
|
||||
return cls(
|
||||
project_name=project.project_name,
|
||||
@@ -150,11 +198,10 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
||||
|
||||
# load the new project and its packages. Don't pass cli variables.
|
||||
renderer = DbtProjectYamlRenderer(profile)
|
||||
|
||||
project = Project.from_project_root(
|
||||
project_root,
|
||||
renderer,
|
||||
verify_version=bool(flags.VERSION_CHECK),
|
||||
verify_version=bool(getattr(self.args, "VERSION_CHECK", True)),
|
||||
)
|
||||
|
||||
runtime_config = self.from_parts(
|
||||
@@ -190,64 +237,19 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
||||
except ValidationError as e:
|
||||
raise ConfigContractBrokenError(e) from e
|
||||
|
||||
@classmethod
|
||||
def _get_rendered_profile(
|
||||
cls,
|
||||
args: Any,
|
||||
profile_renderer: ProfileRenderer,
|
||||
profile_name: Optional[str],
|
||||
) -> Profile:
|
||||
|
||||
return Profile.render_from_args(args, profile_renderer, profile_name)
|
||||
|
||||
@classmethod
|
||||
def collect_parts(cls: Type["RuntimeConfig"], args: Any) -> Tuple[Project, Profile]:
|
||||
|
||||
cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))
|
||||
|
||||
profile = cls.collect_profile(args=args)
|
||||
project_renderer = DbtProjectYamlRenderer(profile, cli_vars)
|
||||
project = cls.collect_project(args=args, project_renderer=project_renderer)
|
||||
assert type(project) is Project
|
||||
return (project, profile)
|
||||
|
||||
@classmethod
|
||||
def collect_profile(
|
||||
cls: Type["RuntimeConfig"], args: Any, profile_name: Optional[str] = None
|
||||
) -> Profile:
|
||||
|
||||
cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))
|
||||
profile_renderer = ProfileRenderer(cli_vars)
|
||||
|
||||
# build the profile using the base renderer and the one fact we know
|
||||
if profile_name is None:
|
||||
# Note: only the named profile section is rendered here. The rest of the
|
||||
# profile is ignored.
|
||||
partial = cls.collect_project(args)
|
||||
assert type(partial) is PartialProject
|
||||
profile_name = partial.render_profile_name(profile_renderer)
|
||||
|
||||
profile = cls._get_rendered_profile(args, profile_renderer, profile_name)
|
||||
# Save env_vars encountered in rendering for partial parsing
|
||||
profile.profile_env_vars = profile_renderer.ctx_obj.env_vars
|
||||
return profile
|
||||
|
||||
@classmethod
|
||||
def collect_project(
|
||||
cls: Type["RuntimeConfig"],
|
||||
args: Any,
|
||||
project_renderer: Optional[DbtProjectYamlRenderer] = None,
|
||||
) -> Union[Project, PartialProject]:
|
||||
|
||||
# profile_name from the project
|
||||
project_root = args.project_dir if args.project_dir else os.getcwd()
|
||||
version_check = bool(flags.VERSION_CHECK)
|
||||
partial = Project.partial_load(project_root, verify_version=version_check)
|
||||
if project_renderer is None:
|
||||
return partial
|
||||
else:
|
||||
project = partial.render(project_renderer)
|
||||
project.project_env_vars = project_renderer.ctx_obj.env_vars
|
||||
return project
|
||||
cli_vars: Dict[str, Any] = getattr(args, "vars", {})
|
||||
profile = cls.get_profile(
|
||||
project_root,
|
||||
cli_vars,
|
||||
args,
|
||||
)
|
||||
flags = get_flags()
|
||||
project = load_project(project_root, bool(flags.VERSION_CHECK), profile, cli_vars)
|
||||
return project, profile
|
||||
|
||||
# Called in main.py, lib.py, task/base.py
|
||||
@classmethod
|
||||
@@ -413,8 +415,8 @@ class UnsetCredentials(Credentials):
|
||||
return ()
|
||||
|
||||
|
||||
# This is used by UnsetProfileConfig, for commands which do
|
||||
# not require a profile, i.e. dbt deps and clean
|
||||
# This is used by commands which do not require
|
||||
# a profile, i.e. dbt deps and clean
|
||||
class UnsetProfile(Profile):
|
||||
def __init__(self):
|
||||
self.credentials = UnsetCredentials()
|
||||
@@ -433,184 +435,12 @@ class UnsetProfile(Profile):
|
||||
return Profile.__getattribute__(self, name)
|
||||
|
||||
|
||||
# This class is used by the dbt deps and clean commands, because they don't
|
||||
# require a functioning profile.
|
||||
@dataclass
|
||||
class UnsetProfileConfig(RuntimeConfig):
|
||||
"""This class acts a lot _like_ a RuntimeConfig, except if your profile is
|
||||
missing, any access to profile members results in an exception.
|
||||
"""
|
||||
|
||||
profile_name: str = field(repr=False)
|
||||
target_name: str = field(repr=False)
|
||||
|
||||
def __post_init__(self):
|
||||
# instead of futzing with InitVar overrides or rewriting __init__, just
|
||||
# `del` the attrs we don't want users touching.
|
||||
del self.profile_name
|
||||
del self.target_name
|
||||
# don't call super().__post_init__(), as that calls validate(), and
|
||||
# this object isn't very valid
|
||||
|
||||
def __getattribute__(self, name):
|
||||
# Override __getattribute__ to check that the attribute isn't 'banned'.
|
||||
if name in {"profile_name", "target_name"}:
|
||||
raise DbtRuntimeError(f'Error: disallowed attribute "{name}" - no profile!')
|
||||
|
||||
# avoid every attribute access triggering infinite recursion
|
||||
return RuntimeConfig.__getattribute__(self, name)
|
||||
|
||||
def to_target_dict(self):
|
||||
# re-override the poisoned profile behavior
|
||||
return DictDefaultEmptyStr({})
|
||||
|
||||
def to_project_config(self, with_packages=False):
|
||||
"""Return a dict representation of the config that could be written to
|
||||
disk with `yaml.safe_dump` to get this configuration.
|
||||
|
||||
Overrides dbt.config.Project.to_project_config to omit undefined profile
|
||||
attributes.
|
||||
|
||||
:param with_packages bool: If True, include the serialized packages
|
||||
file in the root.
|
||||
:returns dict: The serialized profile.
|
||||
"""
|
||||
result = deepcopy(
|
||||
{
|
||||
"name": self.project_name,
|
||||
"version": self.version,
|
||||
"project-root": self.project_root,
|
||||
"profile": "",
|
||||
"model-paths": self.model_paths,
|
||||
"macro-paths": self.macro_paths,
|
||||
"seed-paths": self.seed_paths,
|
||||
"test-paths": self.test_paths,
|
||||
"analysis-paths": self.analysis_paths,
|
||||
"docs-paths": self.docs_paths,
|
||||
"asset-paths": self.asset_paths,
|
||||
"target-path": self.target_path,
|
||||
"snapshot-paths": self.snapshot_paths,
|
||||
"clean-targets": self.clean_targets,
|
||||
"log-path": self.log_path,
|
||||
"quoting": self.quoting,
|
||||
"models": self.models,
|
||||
"on-run-start": self.on_run_start,
|
||||
"on-run-end": self.on_run_end,
|
||||
"dispatch": self.dispatch,
|
||||
"seeds": self.seeds,
|
||||
"snapshots": self.snapshots,
|
||||
"sources": self.sources,
|
||||
"tests": self.tests,
|
||||
"metrics": self.metrics,
|
||||
"entities": self.entities,
|
||||
"exposures": self.exposures,
|
||||
"vars": self.vars.to_dict(),
|
||||
"require-dbt-version": [v.to_version_string() for v in self.dbt_version],
|
||||
"config-version": self.config_version,
|
||||
}
|
||||
)
|
||||
if self.query_comment:
|
||||
result["query-comment"] = self.query_comment.to_dict(omit_none=True)
|
||||
|
||||
if with_packages:
|
||||
result.update(self.packages.to_dict(omit_none=True))
|
||||
|
||||
return result
|
||||
|
||||
    @classmethod
    def from_parts(
        cls,
        project: Project,
        profile: Profile,
        args: Any,
        dependencies: Optional[Mapping[str, "RuntimeConfig"]] = None,
    ) -> "RuntimeConfig":
        """Instantiate a RuntimeConfig from its components.

        :param project: A parsed dbt Project.
        :param profile: Ignored, apart from profile_env_vars.
        :param args: The parsed command-line arguments.
        :returns RuntimeConfig: The new configuration.
        """
        cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))

        return cls(
            project_name=project.project_name,
            version=project.version,
            project_root=project.project_root,
            model_paths=project.model_paths,
            macro_paths=project.macro_paths,
            seed_paths=project.seed_paths,
            test_paths=project.test_paths,
            analysis_paths=project.analysis_paths,
            docs_paths=project.docs_paths,
            asset_paths=project.asset_paths,
            target_path=project.target_path,
            snapshot_paths=project.snapshot_paths,
            clean_targets=project.clean_targets,
            log_path=project.log_path,
            packages_install_path=project.packages_install_path,
            quoting=project.quoting,  # we never use this anyway.
            models=project.models,
            on_run_start=project.on_run_start,
            on_run_end=project.on_run_end,
            dispatch=project.dispatch,
            seeds=project.seeds,
            snapshots=project.snapshots,
            dbt_version=project.dbt_version,
            packages=project.packages,
            manifest_selectors=project.manifest_selectors,
            selectors=project.selectors,
            query_comment=project.query_comment,
            sources=project.sources,
            tests=project.tests,
            metrics=project.metrics,
            entities=project.entities,
            exposures=project.exposures,
            vars=project.vars,
            config_version=project.config_version,
            unrendered=project.unrendered,
            project_env_vars=project.project_env_vars,
            profile_env_vars=profile.profile_env_vars,
            profile_name="",
            target_name="",
            user_config=UserConfig(),
            threads=getattr(args, "threads", 1),
            credentials=UnsetCredentials(),
            args=args,
            cli_vars=cli_vars,
            dependencies=dependencies,
        )

    @classmethod
    def _get_rendered_profile(
        cls,
        args: Any,
        profile_renderer: ProfileRenderer,
        profile_name: Optional[str],
    ) -> Profile:
        profile = UnsetProfile()
        # The profile (for warehouse connection) is not needed, but we want
        # to get the UserConfig, which is also in profiles.yml
        user_config = read_user_config(flags.PROFILES_DIR)
        profile.user_config = user_config
        return profile

    @classmethod
    def from_args(cls: Type[RuntimeConfig], args: Any) -> "RuntimeConfig":
        """Given arguments, read in dbt_project.yml from the current directory,
        read in packages.yml if it exists, and use them to find the profile to
        load.

        :param args: The arguments as parsed from the cli.
        :raises DbtProjectError: If the project is invalid or missing.
        :raises DbtProfileError: If the profile is invalid or missing.
        :raises DbtValidationError: If the cli variables are invalid.
        """
        project, profile = cls.collect_parts(args)

        return cls.from_parts(project=project, profile=profile, args=args)

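
# Illustrative only, not part of this diff: a sketch of how the classmethods
# above compose. The Namespace attributes are assumptions -- any argparse-style
# object exposing the attributes read via getattr() above would do -- and it
# must run from inside a valid dbt project directory.
def _example_roundtrip_project_config() -> Dict[str, Any]:
    from argparse import Namespace

    args = Namespace(vars="{}", threads=1)  # hypothetical parsed CLI args
    config = RuntimeConfig.from_args(args)  # collect_parts() reads dbt_project.yml
    # Serialize back into a dbt_project.yml-shaped dict; the profile key is
    # deliberately blanked by the to_project_config() override above.
    return config.to_project_config()
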
UNUSED_RESOURCE_CONFIGURATION_PATH_MESSAGE = """\
Configuration paths exist in your dbt_project.yml file which do not \
apply to any resources.
There are {} unused configuration paths:
{}
"""

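
# Illustrative only: how the warning template above might be filled in (the
# paths are made up).
def _example_unused_paths_warning() -> str:
    unused = ["models.staging.deprecated", "seeds.legacy"]
    return UNUSED_RESOURCE_CONFIGURATION_PATH_MESSAGE.format(
        len(unused), "\n".join("- " + path for path in unused)
    )
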
def _is_config_used(path, fqns):
@@ -1,12 +1,7 @@
from argparse import Namespace
from typing import Any, Dict, Optional, Union
from xmlrpc.client import Boolean
from dbt.contracts.project import UserConfig
from typing import Any, Dict


import dbt.flags as flags
from dbt.clients import yaml_helper
from dbt.config import Profile, Project, read_user_config
from dbt.config.renderer import DbtProjectYamlRenderer, ProfileRenderer
from dbt.events.functions import fire_event
from dbt.events.types import InvalidOptionYAML
from dbt.exceptions import DbtValidationError, OptionNotYamlDictError
@@ -27,49 +22,3 @@ def parse_cli_yaml_string(var_string: str, cli_option_name: str) -> Dict[str, Any]:
    except DbtValidationError:
        fire_event(InvalidOptionYAML(option_name=cli_option_name))
        raise
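
# Illustrative only: parse_cli_yaml_string (its error-handling tail is shown
# above) accepts the same YAML-dict syntax as dbt's --vars option. A minimal
# call under that assumption:
def _example_parse_vars() -> Dict[str, Any]:
    return parse_cli_yaml_string("{my_var: my_value}", "vars")  # {"my_var": "my_value"}
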
def get_project_config(
    project_path: str,
    profile_name: str,
    args: Namespace = Namespace(),
    cli_vars: Optional[Dict[str, Any]] = None,
    profile: Optional[Profile] = None,
    user_config: Optional[UserConfig] = None,
    return_dict: Boolean = True,
) -> Union[Project, Dict]:
    """Returns a project config (dict or object) from a given project path and profile name.

    Args:
        project_path: Path to project
        profile_name: Name of profile
        args: An argparse.Namespace that represents what would have been passed in on the
            command line (optional)
        cli_vars: A dict of any vars that would have been passed in on the command line (optional)
            (see parse_cli_vars above for formatting details)
        profile: A dbt.config.profile.Profile object (optional)
        user_config: A dbt.contracts.project.UserConfig object (optional)
        return_dict: Return a dict if True, or the full dbt.config.project.Project object if False

    Returns:
        A full project config

    """
    # Generate a profile if not provided
    if profile is None:
        # Generate user_config if not provided
        if user_config is None:
            user_config = read_user_config(flags.PROFILES_DIR)
        # Update flags
        flags.set_from_args(args, user_config)
        if cli_vars is None:
            cli_vars = {}
        profile = Profile.render_from_args(args, ProfileRenderer(cli_vars), profile_name)
    # Generate a project
    project = Project.from_project_root(
        project_path,
        DbtProjectYamlRenderer(profile),
        verify_version=bool(flags.VERSION_CHECK),
    )
    # Return
    return project.to_project_config() if return_dict else project
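
# Illustrative only, not part of this diff: a minimal get_project_config call
# returning the config as a dict. The path, profile name, and vars are made-up
# examples and assume a valid dbt project with a matching profiles.yml entry.
def _example_project_config() -> Dict[str, Any]:
    return get_project_config(
        project_path="/path/to/jaffle_shop",
        profile_name="jaffle_shop",
        cli_vars={"start_date": "2023-01-01"},  # same shape as --vars on the CLI
    )
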
@@ -2,7 +2,8 @@ import json
import os
from typing import Any, Dict, NoReturn, Optional, Mapping, Iterable, Set, List

from dbt import flags
from dbt.flags import get_flags
import dbt.flags as flags_module
from dbt import tracking
from dbt import utils
from dbt.clients.jinja import get_rendered
@@ -29,6 +30,9 @@ import datetime
import re
import itertools

import importlib.util

metricflow_module = importlib.util.find_spec("metricflow")

# See the `contexts` module README for more information on how contexts work
@@ -50,6 +54,11 @@ def get_re_module_context() -> Dict[str, Any]:

    return {name: getattr(re, name) for name in context_exports}


if metricflow_module is not None:
    def get_metricflow_module_context() -> Dict[str, Any]:
        from metricflow.api.metricflow_client import MetricFlowClient
        context_exports = ["explain"]
        return {name: getattr(MetricFlowClient, name) for name in context_exports}

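
# Illustrative only, not in this diff: the guard above is the usual optional-
# dependency pattern. importlib.util.find_spec() probes for the package without
# importing it, and the real import is deferred into the builder so it only
# runs when the context is actually requested. "somepackage" and its "helper"
# attribute are made-up names.
if importlib.util.find_spec("somepackage") is not None:

    def get_somepackage_module_context() -> Dict[str, Any]:
        import somepackage  # deferred import; only reached when installed

        return {"helper": somepackage.helper}
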
def get_itertools_module_context() -> Dict[str, Any]:
    # Excluded dropwhile, filterfalse, takewhile and groupby;
@@ -635,7 +644,7 @@ class BaseContext(metaclass=ContextMeta):

        This supports all flags defined in flags submodule (core/dbt/flags.py)
        """
        return flags.get_flag_obj()
        return flags_module.get_flag_obj()

    @contextmember
    @staticmethod
@@ -651,7 +660,7 @@ class BaseContext(metaclass=ContextMeta):
        {% endmacro %}"
        """

        if not flags.NO_PRINT:
        if get_flags().PRINT:
            print(msg)
        return ""
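
# Illustrative only: how the two context members above surface in Jinja. The
# macro is a made-up example; `flags` and `print` are the context members shown
# in this diff, and WHICH is one of the invocation flags dbt exposes.
#
#     {% macro log_invocation() %}
#       {{ print("running command: " ~ flags.WHICH) }}
#     {% endmacro %}
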
Some files were not shown because too many files have changed in this diff.