Mirror of https://github.com/dbt-labs/dbt-core (synced 2025-12-17 19:31:34 +00:00)

Compare commits: adding-sem...v1.5.4rc1 (316 commits)
Commit range: 7083c0e14f through 847c0b9644 (316 commits).
.bumpversion.cfg

@@ -1,13 +1,19 @@
 [bumpversion]
-current_version = 1.5.0a1
-parse = (?P<major>\d+)
-    \.(?P<minor>\d+)
-    \.(?P<patch>\d+)
-    ((?P<prekind>a|b|rc)
-    (?P<pre>\d+)  # pre-release version num
+current_version = 1.5.4rc1
+parse = (?P<major>[\d]+)  # major version number
+    \.(?P<minor>[\d]+)  # minor version number
+    \.(?P<patch>[\d]+)  # patch version number
+    (?P<prerelease>  # optional pre-release - ex: a1, b2, rc25
+    (?P<prekind>a|b|rc)  # pre-release type
+    (?P<num>[\d]+)  # pre-release version number
+    )?
+    (  # optional nightly release indicator
+    \.(?P<nightly>dev[0-9]+)  # ex: .dev02142023
+    )?  # expected matches: `1.15.0`, `1.5.0a11`, `1.5.0a1.dev123`, `1.5.0.dev123457`, expected failures: `1`, `1.5`, `1.5.2-a1`, `text1.5.0`
 serialize =
-    {major}.{minor}.{patch}{prekind}{pre}
+    {major}.{minor}.{patch}{prekind}{num}.{nightly}
+    {major}.{minor}.{patch}.{nightly}
+    {major}.{minor}.{patch}{prekind}{num}
     {major}.{minor}.{patch}
 commit = False
 tag = False

@@ -21,9 +27,11 @@ values =
     rc
     final

-[bumpversion:part:pre]
+[bumpversion:part:num]
 first_value = 1

+[bumpversion:part:nightly]
+
 [bumpversion:file:core/setup.py]

 [bumpversion:file:core/dbt/version.py]
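The comment embedded in the new `parse` pattern lists its expected matches and failures. A minimal Python check of that claim (an illustration, not part of the diff; it assumes the pattern is compiled with `re.VERBOSE`, which is what makes the inline `#` comments legal):

```python
import re

# The new parse pattern from .bumpversion.cfg, verbatim.
PARSE = r"""
    (?P<major>[\d]+)              # major version number
    \.(?P<minor>[\d]+)            # minor version number
    \.(?P<patch>[\d]+)            # patch version number
    (?P<prerelease>               # optional pre-release - ex: a1, b2, rc25
        (?P<prekind>a|b|rc)       # pre-release type
        (?P<num>[\d]+)            # pre-release version number
    )?
    (                             # optional nightly release indicator
        \.(?P<nightly>dev[0-9]+)  # ex: .dev02142023
    )?
"""
pattern = re.compile(PARSE, re.VERBOSE)

for version in ["1.15.0", "1.5.0a11", "1.5.0a1.dev123", "1.5.0.dev123457"]:
    assert pattern.fullmatch(version), version      # expected matches

for version in ["1", "1.5", "1.5.2-a1", "text1.5.0"]:
    assert not pattern.fullmatch(version), version  # expected failures
```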
.changes/1.5.0.md (new file, 189 lines)

## dbt-core 1.5.0 - April 27, 2023

### Breaking Changes

- Allow `--select` and `--exclude` multiple times ([#7158](https://github.com/dbt-labs/dbt-core/issues/7158))
- Specifying "log-path" and "target-path" in "dbt_project.yml" is deprecated. This functionality will be removed in a future version of dbt-core. If you need to specify a custom path for logs or artifacts, please set it via CLI flag or env var instead. ([#6882](https://github.com/dbt-labs/dbt-core/issues/6882))
- Remove exception functions marked as deprecated in 1.4 release ([#6578](https://github.com/dbt-labs/dbt-core/issues/6578))

### Features

- Data type constraints are now native to SQL table materializations. Enforce columns are specific data types and not null depending on database functionality. ([#6079](https://github.com/dbt-labs/dbt-core/issues/6079))
- Have dbt debug spit out structured json logs with flags enabled. ([#5353](https://github.com/dbt-labs/dbt-core/issues/5353))
- ✨ add unix-style wildcard selector method ([#6598](https://github.com/dbt-labs/dbt-core/issues/6598))
- add adapter_response to dbt test and freshness result ([#2964](https://github.com/dbt-labs/dbt-core/issues/2964))
- add support for DBT_PROJECT_DIR env var ([#6078](https://github.com/dbt-labs/dbt-core/issues/6078))
- Improve error message for packages missing `dbt_project.yml` ([#6663](https://github.com/dbt-labs/dbt-core/issues/6663))
- Make project version optional ([#6603](https://github.com/dbt-labs/dbt-core/issues/6603))
- Adjust makefile to have clearer instructions for CI env var changes. ([#6689](https://github.com/dbt-labs/dbt-core/issues/6689))
- Stand-alone Python module for PostgresColumn ([#6772](https://github.com/dbt-labs/dbt-core/issues/6772))
- Enable diff based partial parsing ([#6592](https://github.com/dbt-labs/dbt-core/issues/6592))
- Exposure owner requires one of name or email keys, and accepts additional arbitrary keys ([#6833](https://github.com/dbt-labs/dbt-core/issues/6833))
- Parse 'group' resource ([#6921](https://github.com/dbt-labs/dbt-core/issues/6921))
- parse 'group' config on groupable nodes ([#6823](https://github.com/dbt-labs/dbt-core/issues/6823))
- Implemented new log cli parameters for finer-grained control. ([#6639](https://github.com/dbt-labs/dbt-core/issues/6639))
- Add access attribute to parsed nodes ([#6824](https://github.com/dbt-labs/dbt-core/issues/6824))
- Enforce contracts on models materialized as tables, views, and incremental ([#6751](https://github.com/dbt-labs/dbt-core/issues/6751), [#7034](https://github.com/dbt-labs/dbt-core/issues/7034), [#6756](https://github.com/dbt-labs/dbt-core/issues/6756), [#7154](https://github.com/dbt-labs/dbt-core/issues/7154))
- Add ability to select by group resource ([#6825](https://github.com/dbt-labs/dbt-core/issues/6825))
- Disallow ref'ing a private model across groups ([#6826](https://github.com/dbt-labs/dbt-core/issues/6826))
- make version configs optional ([#7054](https://github.com/dbt-labs/dbt-core/issues/7054))
- [CT-1584] New top level commands: interactive compile ([#6358](https://github.com/dbt-labs/dbt-core/issues/6358))
- Make model contracts agnostic to ordering ([#6975](https://github.com/dbt-labs/dbt-core/issues/6975), [#7064](https://github.com/dbt-labs/dbt-core/issues/7064))
- Unified constraints and check_constraints properties for columns and models ([#7066](https://github.com/dbt-labs/dbt-core/issues/7066))
- Switch from betterproto to google protobuf and enable more flexible meta dictionary in logs ([#6832](https://github.com/dbt-labs/dbt-core/issues/6832))
- Ignore duplicate edges in subgraph to speed up dbt build ([#7191](https://github.com/dbt-labs/dbt-core/issues/7191))
- Support setting of callbacks for programmatic uses of `dbtRunner` ([#6763](https://github.com/dbt-labs/dbt-core/issues/6763)) (see the sketch after these notes)
- Detect breaking changes to contracts in state:modified check ([#6869](https://github.com/dbt-labs/dbt-core/issues/6869))
- New command: dbt show ([#7207](https://github.com/dbt-labs/dbt-core/issues/7207), [#7179](https://github.com/dbt-labs/dbt-core/issues/7179), [#6359](https://github.com/dbt-labs/dbt-core/issues/6359))
- Added prettier printing to ContractError class ([#7209](https://github.com/dbt-labs/dbt-core/issues/7209))
- Add support for model-level constraints ([#6754](https://github.com/dbt-labs/dbt-core/issues/6754))
- model versions ([#7263](https://github.com/dbt-labs/dbt-core/issues/7263))
- Add relation info (database, schema, alias) to node_info dictionary in structured logging ([#6724](https://github.com/dbt-labs/dbt-core/issues/6724))
- Add --no-populate-cache to optionally skip relation cache population ([#1751](https://github.com/dbt-labs/dbt-core/issues/1751))
- select resources by patch path ([#7315](https://github.com/dbt-labs/dbt-core/issues/7315))
- Add version selector method ([#7199](https://github.com/dbt-labs/dbt-core/issues/7199))

### Fixes

- Remove trailing slashes from source paths ([#6102](https://github.com/dbt-labs/dbt-core/issues/6102))
- add merge_exclude_columns adapter tests ([#6699](https://github.com/dbt-labs/dbt-core/issues/6699))
- Include adapter_response in NodeFinished run_result log event ([#6703](https://github.com/dbt-labs/dbt-core/issues/6703))
- Sort cli vars before hashing for partial parsing ([#6710](https://github.com/dbt-labs/dbt-core/issues/6710))
- [Regression] exposure_content referenced incorrectly ([#6738](https://github.com/dbt-labs/dbt-core/issues/6738))
- Snapshot strategies: add a newline for subquery ([#6781](https://github.com/dbt-labs/dbt-core/issues/6781))
- Remove pin on packaging and stop using it for prerelease comparisons ([#6834](https://github.com/dbt-labs/dbt-core/issues/6834))
- Re-add depends_on.macros to SeedNode, to support seeds with hooks calling macros ([#6806](https://github.com/dbt-labs/dbt-core/issues/6806))
- Fix regression of --quiet cli parameter behavior ([#6749](https://github.com/dbt-labs/dbt-core/issues/6749))
- Add double type to list of float column types for the column class ([#6876](https://github.com/dbt-labs/dbt-core/issues/6876))
- Ensure results from hooks contain nodes when processing them ([#6796](https://github.com/dbt-labs/dbt-core/issues/6796))
- Always flush stdout after logging ([#6901](https://github.com/dbt-labs/dbt-core/issues/6901))
- Reapply logging fixes which were accidentally reverted ([#6936](https://github.com/dbt-labs/dbt-core/issues/6936))
- Set relation_name in test nodes at compile time ([#6930](https://github.com/dbt-labs/dbt-core/issues/6930))
- Re-add initialization events, --log-cache-events in new CLI ([#6933](https://github.com/dbt-labs/dbt-core/issues/6933))
- Fix previous state tests and disabled exposures, metrics ([#6752](https://github.com/dbt-labs/dbt-core/issues/6752), [#6753](https://github.com/dbt-labs/dbt-core/issues/6753))
- Make use of hashlib.md5() FIPS compliant ([#6900](https://github.com/dbt-labs/dbt-core/issues/6900))
- add timeout for dbt --version command ([#6992](https://github.com/dbt-labs/dbt-core/issues/6992))
- Fix compilation logic for ephemeral nodes ([#6885](https://github.com/dbt-labs/dbt-core/issues/6885))
- Fix semver comparison logic by ensuring numeric values ([#7039](https://github.com/dbt-labs/dbt-core/issues/7039))
- add pytz dependency ([#7077](https://github.com/dbt-labs/dbt-core/issues/7077))
- allow adapters to change model name resolution in py models ([#7114](https://github.com/dbt-labs/dbt-core/issues/7114))
- Add exception handling in postflight decorator to address exit codes ([#7010](https://github.com/dbt-labs/dbt-core/issues/7010))
- Recreates missing tracking events ([#6097](https://github.com/dbt-labs/dbt-core/issues/6097), [#6098](https://github.com/dbt-labs/dbt-core/issues/6098))
- Fix partial parsing error due to not requiring "version" ([#7236](https://github.com/dbt-labs/dbt-core/issues/7236))
- Handle internal exceptions ([#7118](https://github.com/dbt-labs/dbt-core/issues/7118))
- Improved failed event serialization handling and associated tests ([#7113](https://github.com/dbt-labs/dbt-core/issues/7113), [#7108](https://github.com/dbt-labs/dbt-core/issues/7108), [#6568](https://github.com/dbt-labs/dbt-core/issues/6568))
- Fix handling of artifacts in read_and_check_versions ([#7252](https://github.com/dbt-labs/dbt-core/issues/7252))
- Stringify datetimes in logging for prettier messages ([#7255](https://github.com/dbt-labs/dbt-core/issues/7255))
- avoid dbtRunner default callbacks being shared across instances ([#7278](https://github.com/dbt-labs/dbt-core/issues/7278))
- Ensure same_contract is called for state:modified ([#7282](https://github.com/dbt-labs/dbt-core/issues/7282))
- Avoid revoking grants for views when `copy_grants=true` ([#7280](https://github.com/dbt-labs/dbt-core/issues/7280))
- Duplicated flags now throw errors instead of being overridden by the parent-level flag ([#6913](https://github.com/dbt-labs/dbt-core/issues/6913))
- Ensure that invocation_id changes between programmatic invocations. ([#7197](https://github.com/dbt-labs/dbt-core/issues/7197))
- Adding a new column is not a breaking contract change ([#7332](https://github.com/dbt-labs/dbt-core/issues/7332))
- fix versioned model selection in subdirectories ([#7348](https://github.com/dbt-labs/dbt-core/issues/7348))
- safe version attribute access in _check_resource_uniqueness ([#7375](https://github.com/dbt-labs/dbt-core/issues/7375))
- Fix dbt command missing target-path param ([#7411](https://github.com/dbt-labs/dbt-core/issues/7411))
- Fix v0 ref resolution ([#7408](https://github.com/dbt-labs/dbt-core/issues/7408))
- fix groupable node partial parsing, raise DbtReferenceError at runtime for safety ([#7437](https://github.com/dbt-labs/dbt-core/issues/7437))
- Fix partial parsing of latest_version changes for downstream references ([#7369](https://github.com/dbt-labs/dbt-core/issues/7369))

### Docs

- Improve displayed message under "Arguments" section for argumentless macro ([dbt-docs/#358](https://github.com/dbt-labs/dbt-docs/issues/358))
- update link to installation instructions ([dbt-docs/#None](https://github.com/dbt-labs/dbt-docs/issues/None))
- Fix JSON path to overview docs ([dbt-docs/#366](https://github.com/dbt-labs/dbt-docs/issues/366))
- Searchable column descriptions ([dbt-docs/#140](https://github.com/dbt-labs/dbt-docs/issues/140), [dbt-docs/#322](https://github.com/dbt-labs/dbt-docs/issues/322), [dbt-docs/#369](https://github.com/dbt-labs/dbt-docs/issues/369))
- Add access property to model details ([dbt-docs/#381](https://github.com/dbt-labs/dbt-docs/issues/381))
- Display model owner by name and email ([dbt-docs/#377](https://github.com/dbt-labs/dbt-docs/issues/377))
- Add view of public models sorted by group to left navigation ([dbt-docs/#379](https://github.com/dbt-labs/dbt-docs/issues/379))
- Distinguish node "access" in the DAG with node borders & opacity. ([dbt-docs/#378](https://github.com/dbt-labs/dbt-docs/issues/378))
- Fix JSON path to package overview docs ([dbt-docs/#390](https://github.com/dbt-labs/dbt-docs/issues/390))
- Add selection by group to DAG ([dbt-docs/#380](https://github.com/dbt-labs/dbt-docs/issues/380))
- Add support for model versions ([dbt-docs/#406](https://github.com/dbt-labs/dbt-docs/issues/406))

### Under the Hood

- [CT-921] dbt compile works in click ([#5545](https://github.com/dbt-labs/dbt-core/issues/5545))
- Fix use of ConnectionReused logging event ([#6168](https://github.com/dbt-labs/dbt-core/issues/6168))
- Port docs tests to pytest ([#6573](https://github.com/dbt-labs/dbt-core/issues/6573))
- Update deprecated github action command ([#6153](https://github.com/dbt-labs/dbt-core/issues/6153))
- dbt snapshot works in click ([#5554](https://github.com/dbt-labs/dbt-core/issues/5554))
- dbt list working with click ([#5549](https://github.com/dbt-labs/dbt-core/issues/5549))
- Add dbt run-operation to click CLI ([#5552](https://github.com/dbt-labs/dbt-core/issues/5552))
- dbt build working with new click framework ([#5541](https://github.com/dbt-labs/dbt-core/issues/5541))
- dbt docs generate works with new click framework ([#5543](https://github.com/dbt-labs/dbt-core/issues/5543))
- Replaced the EmptyLine event with a more general Formatting event, and added a Note event. ([#6481](https://github.com/dbt-labs/dbt-core/issues/6481))
- Small optimization on manifest parsing benefitting large DAGs ([#6697](https://github.com/dbt-labs/dbt-core/issues/6697))
- Revised and simplified various structured logging events ([#6664](https://github.com/dbt-labs/dbt-core/issues/6664), [#6665](https://github.com/dbt-labs/dbt-core/issues/6665), [#6666](https://github.com/dbt-labs/dbt-core/issues/6666))
- dbt init works with click ([#5548](https://github.com/dbt-labs/dbt-core/issues/5548))
- [CT-920][CT-1900] Create Click CLI runner and use it to fix dbt docs commands ([#5544](https://github.com/dbt-labs/dbt-core/issues/5544), [#6722](https://github.com/dbt-labs/dbt-core/issues/6722))
- Migrate debug task to click ([#5546](https://github.com/dbt-labs/dbt-core/issues/5546))
- Optimized GraphQueue to remove graph analysis bottleneck in large DAGs. ([#6759](https://github.com/dbt-labs/dbt-core/issues/6759))
- Implement --version for click cli ([#6757](https://github.com/dbt-labs/dbt-core/issues/6757))
- [CT-1841] Convert custom target test to Pytest ([#6638](https://github.com/dbt-labs/dbt-core/issues/6638))
- Remove BigQuery-specific byte abbreviations ([#6741](https://github.com/dbt-labs/dbt-core/issues/6741))
- warn_error/warn_error_options mutual exclusivity in click ([#6579](https://github.com/dbt-labs/dbt-core/issues/6579))
- Enables the new Click Cli on the commandline! 🚀 ([#6784](https://github.com/dbt-labs/dbt-core/issues/6784))
- Lazily call --version ([#6812](https://github.com/dbt-labs/dbt-core/issues/6812))
- Moving simple_seed to adapter zone to help adapter test conversions ([#CT-1959](https://github.com/dbt-labs/dbt-core/issues/CT-1959))
- flags.THREADS defaults to None ([#6887](https://github.com/dbt-labs/dbt-core/issues/6887))
- Fixing target type exposure error ([#6928](https://github.com/dbt-labs/dbt-core/issues/6928))
- Test binary serialization of logging events ([#6852](https://github.com/dbt-labs/dbt-core/issues/6852))
- Treat contract config as a python object ([#6748](https://github.com/dbt-labs/dbt-core/issues/6748), [#7184](https://github.com/dbt-labs/dbt-core/issues/7184))
- Add deprecation warning for DBT_NO_PRINT ([#6960](https://github.com/dbt-labs/dbt-core/issues/6960))
- Make output_keys click param multi-option instead of a string ([#6676](https://github.com/dbt-labs/dbt-core/issues/6676))
- Remove cli doc generation workflow ([#7088](https://github.com/dbt-labs/dbt-core/issues/7088))
- Move validation of group earlier ([#7087](https://github.com/dbt-labs/dbt-core/issues/7087))
- Deprecate additional environment variables ([#6903](https://github.com/dbt-labs/dbt-core/issues/6903))
- Add CommandCompleted event, and fire it upon completion of every command ([#6878](https://github.com/dbt-labs/dbt-core/issues/6878))
- Improves build times for common selections by improving subgraph calculation ([#7195](https://github.com/dbt-labs/dbt-core/issues/7195))
- Remove upper pin for hologram/jsonschema ([#6775](https://github.com/dbt-labs/dbt-core/issues/6775))
- Generalize constraint compatibility warnings ([#7067](https://github.com/dbt-labs/dbt-core/issues/7067))
- Add kwargs support to dbtRunner ([#7070](https://github.com/dbt-labs/dbt-core/issues/7070))
- Add unique_id to ShowNode and CompiledNode logging events ([#7305](https://github.com/dbt-labs/dbt-core/issues/7305))
- Prettify message for ListRelations event ([#7310](https://github.com/dbt-labs/dbt-core/issues/7310))
- `Parse` now returns manifest when invoked via dbtRunner ([#6547](https://github.com/dbt-labs/dbt-core/issues/6547))
- Track data about group, access, contract, version usage ([#7170](https://github.com/dbt-labs/dbt-core/issues/7170), [#7171](https://github.com/dbt-labs/dbt-core/issues/7171))
- Update docs link in ContractBreakingChangeError message ([#7366](https://github.com/dbt-labs/dbt-core/issues/7366))
- Update --help text for cache-related parameters ([#7381](https://github.com/dbt-labs/dbt-core/issues/7381))
- Small UX improvements to model versions: Support defining latest_version in unsuffixed file by default. Notify on unpinned ref when a prerelease version is available. ([#7443](https://github.com/dbt-labs/dbt-core/issues/7443))

### Dependencies

- Update pathspec requirement from <0.11,>=0.9 to >=0.9,<0.12 in /core ([#6737](https://github.com/dbt-labs/dbt-core/pull/6737))
- Bump ubuntu from 22.04 to 23.04 ([#6865](https://github.com/dbt-labs/dbt-core/pull/6865))
- Revert hoisting dbt.cli.main into the dbt.name namespace ([#](https://github.com/dbt-labs/dbt-core/pull/))
- Bump python from 3.11.1-slim-bullseye to 3.11.2-slim-bullseye in /docker ([#7196](https://github.com/dbt-labs/dbt-core/pull/7196))
- Bump black from 22.12.0 to 23.3.0 ([#7243](https://github.com/dbt-labs/dbt-core/pull/7243))
- Bump mashumaro[msgpack] from 3.3.1 to 3.6 ([#7294](https://github.com/dbt-labs/dbt-core/pull/7294))

### Dependency

- Bump mypy from 0.971 to 0.981 ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904))
- Bump python from 3.10.7-slim-bullseye to 3.11.1-slim-bullseye in /docker ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904))
- Bump black from 22.10.0 to 22.12.0 ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904))

### Contributors
- [@ttusing](https://github.com/ttusing) ([#7195](https://github.com/dbt-labs/dbt-core/issues/7195))
- [@Goodkat](https://github.com/Goodkat) ([#6992](https://github.com/dbt-labs/dbt-core/issues/6992))
- [@MartinGuindon](https://github.com/MartinGuindon) ([#358](https://github.com/dbt-labs/dbt-core/issues/358))
- [@MatthieuBlais](https://github.com/MatthieuBlais) ([#7191](https://github.com/dbt-labs/dbt-core/issues/7191))
- [@RobbertDM](https://github.com/RobbertDM) ([#6781](https://github.com/dbt-labs/dbt-core/issues/6781))
- [@aezomz](https://github.com/aezomz) ([#2964](https://github.com/dbt-labs/dbt-core/issues/2964))
- [@benallard](https://github.com/benallard) ([#7294](https://github.com/dbt-labs/dbt-core/pull/7294))
- [@boxysean](https://github.com/boxysean) ([#6697](https://github.com/dbt-labs/dbt-core/issues/6697))
- [@callum-mcdata](https://github.com/callum-mcdata) ([#6928](https://github.com/dbt-labs/dbt-core/issues/6928))
- [@chamini2](https://github.com/chamini2) ([#7278](https://github.com/dbt-labs/dbt-core/issues/7278))
- [@dave-connors-3](https://github.com/dave-connors-3) ([#7054](https://github.com/dbt-labs/dbt-core/issues/7054), [#7315](https://github.com/dbt-labs/dbt-core/issues/7315), [#6699](https://github.com/dbt-labs/dbt-core/issues/6699))
- [@davidbloss](https://github.com/davidbloss) ([#6153](https://github.com/dbt-labs/dbt-core/issues/6153))
- [@halvorlu](https://github.com/halvorlu) ([#366](https://github.com/dbt-labs/dbt-core/issues/366))
- [@jmg-duarte](https://github.com/jmg-duarte) ([#6102](https://github.com/dbt-labs/dbt-core/issues/6102))
- [@kentkr](https://github.com/kentkr) ([#7209](https://github.com/dbt-labs/dbt-core/issues/7209))
- [@leo-schick](https://github.com/leo-schick) ([#6078](https://github.com/dbt-labs/dbt-core/issues/6078))
- [@nielspardon](https://github.com/nielspardon) ([#6900](https://github.com/dbt-labs/dbt-core/issues/6900))
- [@rlh1994](https://github.com/rlh1994) ([#6876](https://github.com/dbt-labs/dbt-core/issues/6876), [#390](https://github.com/dbt-labs/dbt-core/issues/390))
- [@ryancharris](https://github.com/ryancharris) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
- [@sdebruyn](https://github.com/sdebruyn) ([#7077](https://github.com/dbt-labs/dbt-core/issues/7077))
- [@seub](https://github.com/seub) ([#6603](https://github.com/dbt-labs/dbt-core/issues/6603))
- [@sungchun12](https://github.com/sungchun12) ([#6079](https://github.com/dbt-labs/dbt-core/issues/6079))
- [@z3z1ma](https://github.com/z3z1ma) ([#6598](https://github.com/dbt-labs/dbt-core/issues/6598))
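The 1.5.0 notes above add callback support ([#6763](https://github.com/dbt-labs/dbt-core/issues/6763)) and kwargs support ([#7070](https://github.com/dbt-labs/dbt-core/issues/7070)) to the programmatic `dbtRunner`. A hedged sketch of how those pieces fit together; the project path and selector are illustrative, not from the changelog:

```python
from dbt.cli.main import dbtRunner

def on_event(event) -> None:
    # Receives every structured logging event dbt emits during the invocation.
    print(event.info.name)

runner = dbtRunner(callbacks=[on_event])

# Keyword arguments are applied as CLI flags, e.g. project_dir= -> --project-dir.
result = runner.invoke(["run", "--select", "my_model"], project_dir="path/to/project")
print(result.success)
```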
.changes/1.5.1.md (new file, 23 lines)

## dbt-core 1.5.1 - May 30, 2023

### Fixes

- fix typo in unpacking statically parsed ref ([#7364](https://github.com/dbt-labs/dbt-core/issues/7364))
- Fix inverted `--print/--no-print` flag ([#7517](https://github.com/dbt-labs/dbt-core/issues/7517))
- Back-compat for previous return type of 'collect_freshness' macro ([#7489](https://github.com/dbt-labs/dbt-core/issues/7489))
- print model version in dbt show if specified ([#7407](https://github.com/dbt-labs/dbt-core/issues/7407))
- Allow missing `profiles.yml` for `dbt deps` and `dbt init` ([#7511](https://github.com/dbt-labs/dbt-core/issues/7511))
- Do not rewrite manifest.json during 'docs serve' command ([#7553](https://github.com/dbt-labs/dbt-core/issues/7553))
- Pin protobuf to greater than 4.0.0 ([#7565](https://github.com/dbt-labs/dbt-core/issues/7565))
- Throw error for duplicated versioned and unversioned models ([#7487](https://github.com/dbt-labs/dbt-core/issues/7487))
- Fix: Relative project paths weren't working with deps ([#7491](https://github.com/dbt-labs/dbt-core/issues/7491))
- Fall back if rendering the password field fails. ([#7629](https://github.com/dbt-labs/dbt-core/issues/7629))
- Stringify flag paths for Jinja context ([#7495](https://github.com/dbt-labs/dbt-core/issues/7495))

### Under the Hood

- Remove legacy file logger code ([#NA](https://github.com/dbt-labs/dbt-core/issues/NA))

### Contributors
- [@iknox-fa](https://github.com/iknox-fa) ([#7491](https://github.com/dbt-labs/dbt-core/issues/7491), [#NA](https://github.com/dbt-labs/dbt-core/issues/NA))
- [@thomasgjerdekog](https://github.com/thomasgjerdekog) ([#7517](https://github.com/dbt-labs/dbt-core/issues/7517))
.changes/1.5.2.md (new file, 26 lines)

## dbt-core 1.5.2 - June 22, 2023

### Features

- add access selection syntax ([#7738](https://github.com/dbt-labs/dbt-core/issues/7738)) (example below)
- Add AdapterRegistered event log message ([#7038](https://github.com/dbt-labs/dbt-core/issues/7038))

### Fixes

- Add --target-path to dbt snapshot command. ([#7418](https://github.com/dbt-labs/dbt-core/issues/7418))
- Constraint rendering fixes: wrap check expression in parentheses, foreign key 'references', support expression in all constraint types ([#7417](https://github.com/dbt-labs/dbt-core/issues/7417), [#7480](https://github.com/dbt-labs/dbt-core/issues/7480), [#7416](https://github.com/dbt-labs/dbt-core/issues/7416))
- Fix warning messages for deprecated dbt_project.yml configs ([#7424](https://github.com/dbt-labs/dbt-core/issues/7424))
- Respect column 'quote' config in model contracts ([#7370](https://github.com/dbt-labs/dbt-core/issues/7370))
- Improve warnings for constraints and materialization types ([#7335](https://github.com/dbt-labs/dbt-core/issues/7335))
- Incorrect paths used for "target" and "state" directories ([#7465](https://github.com/dbt-labs/dbt-core/issues/7465))
- Using version 0 works when resolving single model ([#7372](https://github.com/dbt-labs/dbt-core/issues/7372))
- Fix empty --warn-error-options error message ([#7730](https://github.com/dbt-labs/dbt-core/issues/7730))
- send sql header on contract enforcement ([#7714](https://github.com/dbt-labs/dbt-core/issues/7714))
- Fix path selector when using project-dir ([#7819](https://github.com/dbt-labs/dbt-core/issues/7819))
- Allow dbt show --inline preview of private models ([#7837](https://github.com/dbt-labs/dbt-core/issues/7837))
- Updating this error message to point to the correct URL ([#7789](https://github.com/dbt-labs/dbt-core/issues/7789))

### Contributors
- [@dave-connors-3](https://github.com/dave-connors-3) ([#7738](https://github.com/dbt-labs/dbt-core/issues/7738))
- [@dwreeves](https://github.com/dwreeves) ([#7418](https://github.com/dbt-labs/dbt-core/issues/7418))
- [@mirnawong1](https://github.com/mirnawong1) ([#7789](https://github.com/dbt-labs/dbt-core/issues/7789))
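For the access selection syntax added in [#7738](https://github.com/dbt-labs/dbt-core/issues/7738) above, a minimal illustration via the programmatic runner; the selector value is the new part, and `dbtRunner` usage is as in the 1.5.0 sketch:

```python
from dbt.cli.main import dbtRunner

# List only models whose `access` config is `public`.
res = dbtRunner().invoke(["ls", "--select", "access:public"])
```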
.changes/1.5.3.md (new file, 18 lines)

## dbt-core 1.5.3 - July 17, 2023

### Fixes

- Add --target-path to more CLI subcommands ([#7646](https://github.com/dbt-labs/dbt-core/issues/7646))
- Remove limitation on use of sqlparse 0.4.4 ([#7515](https://github.com/dbt-labs/dbt-core/issues/7515))
- Move project_root contextvar into events.contextvars ([#7937](https://github.com/dbt-labs/dbt-core/issues/7937))
- Inline query emits a proper error message ([#7940](https://github.com/dbt-labs/dbt-core/issues/7940))
- Allow on_schema_change = fail for contracted incremental models ([#7975](https://github.com/dbt-labs/dbt-core/issues/7975))
- Nicer error message if model with enforced contract is missing 'columns' specification ([#7943](https://github.com/dbt-labs/dbt-core/issues/7943))
- Detect breaking contract changes to versioned models ([#8030](https://github.com/dbt-labs/dbt-core/issues/8030))

### Dependencies

- Pin click>=7.0,<8.1.4 ([#8050](https://github.com/dbt-labs/dbt-core/pull/8050))

### Contributors
- [@dwreeves](https://github.com/dwreeves) ([#7646](https://github.com/dbt-labs/dbt-core/issues/7646))
.changes/1.5.4-rc1.md (new file, 10 lines)

## dbt-core 1.5.4-rc1 - July 27, 2023

### Fixes

- Ensure `warn_error_options` get serialized in `invocation_args_dict` ([#7694](https://github.com/dbt-labs/dbt-core/issues/7694))
- Improve handling of CTE injection with ephemeral models ([#8213](https://github.com/dbt-labs/dbt-core/issues/8213))

### Under the Hood

- Refactor flaky test pp_versioned_models ([#7781](https://github.com/dbt-labs/dbt-core/issues/7781))
.changes/1.5.4/Fixes-20230720-161513.yaml (new file, 6 lines)

kind: Fixes
body: Ensure `warn_error_options` get serialized in `invocation_args_dict`
time: 2023-07-20T16:15:13.761813-07:00
custom:
  Author: QMalcolm
  Issue: "7694"
.changes/1.5.4/Fixes-20230726-104448.yaml (new file, 6 lines)

kind: Fixes
body: Improve handling of CTE injection with ephemeral models
time: 2023-07-26T10:44:48.888451-04:00
custom:
  Author: gshank
  Issue: "8213"
.changes/1.5.4/Under the Hood-20230719-124611.yaml (new file, 6 lines)

kind: Under the Hood
body: Refactor flaky test pp_versioned_models
time: 2023-07-19T12:46:11.972481-04:00
custom:
  Author: gshank
  Issue: "7781"
Deleted changelog entry (@@ -1,6 +0,0 @@):

kind: Under the Hood
body: Fix use of ConnectionReused logging event
time: 2023-01-13T13:25:13.023168-05:00
custom:
  Author: gshank
  Issue: "6168"
Deleted changelog entry (@@ -1,6 +0,0 @@):

kind: Under the Hood
body: Update deprecated github action command
time: 2023-01-17T11:17:37.046095-06:00
custom:
  Author: davidbloss
  Issue: "6153"
.changie.yaml

@@ -4,6 +4,7 @@ headerPath: header.tpl.md
 versionHeaderPath: ""
 changelogPath: CHANGELOG.md
 versionExt: md
+envPrefix: "CHANGIE_"
 versionFormat: '## dbt-core {{.Version}} - {{.Time.Format "January 02, 2006"}}'
 kindFormat: '### {{.Kind}}'
 changeFormat: |-

@@ -87,32 +88,44 @@ custom: (old and new template lines are shown interleaved, as rendered)

footerFormat: |
  {{- $contributorDict := dict }}
  {{- /* any names added to this list should be all lowercase for later matching purposes */}}
  {{- $core_team := list "michelleark" "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "aranke" "dependabot[bot]" "snyk-bot" "colin-rogers-dbt" }}
  {{- /* ensure all names in this list are all lowercase for later matching purposes */}}
  {{- $core_team := splitList " " .Env.CORE_TEAM }}
  {{- /* ensure we always skip snyk and dependabot in addition to the core team */}}
  {{- $maintainers := list "dependabot[bot]" "snyk-bot"}}
  {{- range $team_member := $core_team }}
  {{- $team_member_lower := lower $team_member }}
  {{- $maintainers = append $maintainers $team_member_lower }}
  {{- end }}
  {{- range $change := .Changes }}
  {{- $authorList := splitList " " $change.Custom.Author }}
  {{- /* loop through all authors for a single changelog */}}
  {{- range $author := $authorList }}
  {{- $authorLower := lower $author }}
  {{- /* we only want to include non-core team contributors */}}
  {{- if not (has $authorLower $core_team)}}
  {{- if not (has $authorLower $maintainers)}}
  {{- $changeList := splitList " " $change.Custom.Author }}
  {{- /* Docs kind link back to dbt-docs instead of dbt-core issues */}}
  {{- $IssueList := list }}
  {{- $changeLink := $change.Kind }}
  {{- if or (eq $change.Kind "Dependencies") (eq $change.Kind "Security") }}
  {{- $changeLink = "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $change.Custom.PR }}
  {{- else if eq $change.Kind "Docs"}}
  {{- $changeLink = "[dbt-docs/#nbr](https://github.com/dbt-labs/dbt-docs/issues/nbr)" | replace "nbr" $change.Custom.Issue }}
  {{- $changes := splitList " " $change.Custom.PR }}
  {{- range $issueNbr := $changes }}
  {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $issueNbr }}
  {{- $IssueList = append $IssueList $changeLink }}
  {{- end -}}
  {{- else }}
  {{- $changeLink = "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $change.Custom.Issue }}
  {{- $changes := splitList " " $change.Custom.Issue }}
  {{- range $issueNbr := $changes }}
  {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
  {{- $IssueList = append $IssueList $changeLink }}
  {{- end -}}
  {{- end }}
  {{- /* check if this contributor has other changes associated with them already */}}
  {{- if hasKey $contributorDict $author }}
  {{- $contributionList := get $contributorDict $author }}
  {{- $contributionList = append $contributionList $changeLink }}
  {{- $contributionList = concat $contributionList $IssueList }}
  {{- $contributorDict := set $contributorDict $author $contributionList }}
  {{- else }}
  {{- $contributionList := list $changeLink }}
  {{- $contributionList := $IssueList }}
  {{- $contributorDict := set $contributorDict $author $contributionList }}
  {{- end }}
  {{- end}}
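The updated footerFormat template is dense Go templating, so here is a rough Python analogue of what it computes: a mapping from external contributors to the issue/PR links for their changes, skipping bots plus the core team (now read from the CORE_TEAM env var). Names and data shapes below are illustrative, not part of the repository:

```python
import os

core_team = os.environ.get("CORE_TEAM", "").split()
maintainers = {"dependabot[bot]", "snyk-bot", *(m.lower() for m in core_team)}

def issue_links(change: dict) -> list[str]:
    # Dependencies/Security link to PRs; Docs link to dbt-docs; everything
    # else links to dbt-core issues. Multiple numbers may be space-separated.
    kind = change["kind"]
    if kind in ("Dependencies", "Security"):
        base, numbers = "https://github.com/dbt-labs/dbt-core/pull/{n}", change["PR"]
    elif kind == "Docs":
        base, numbers = "https://github.com/dbt-labs/dbt-docs/issues/{n}", change["Issue"]
    else:
        base, numbers = "https://github.com/dbt-labs/dbt-core/issues/{n}", change["Issue"]
    return [f"[#{n}]({base.format(n=n)})" for n in numbers.split()]

def contributors(changes: list[dict]) -> dict[str, list[str]]:
    out: dict[str, list[str]] = {}
    for change in changes:
        for author in change["Author"].split():
            if author.lower() in maintainers:
                continue  # only external contributors are credited
            out.setdefault(author, []).extend(issue_links(change))
    return out
```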
.gitattributes (vendored)

@@ -1,2 +1,6 @@
 core/dbt/include/index.html binary
 tests/functional/artifacts/data/state/*/manifest.json binary
+core/dbt/docs/build/html/searchindex.js binary
+core/dbt/docs/build/html/index.html binary
+performance/runner/Cargo.lock binary
+core/dbt/events/types_pb2.py binary
.github/workflows/backport.yml (vendored)

@@ -35,6 +35,6 @@ jobs:
       github.event.pull_request.merged
       && contains(github.event.label.name, 'backport')
     steps:
-      - uses: tibdex/backport@v2.0.2
+      - uses: tibdex/backport@v2.0.3
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/cut-release-branch.yml (vendored, new file, 41 lines)

# **what?**
# Cuts a new `*.latest` branch
# Also cleans up all files in `.changes/unreleased` and `.changes/previous version` on
# `main` and bumps `main` to the input version.

# **why?**
# Generally reduces the workload of engineers and reduces error. Allows automation.

# **when?**
# This will run when called manually.

name: Cut new release branch

on:
  workflow_dispatch:
    inputs:
      version_to_bump_main:
        description: 'The alpha version main should bump to (ex. 1.6.0a1)'
        required: true
      new_branch_name:
        description: 'The full name of the new branch (ex. 1.5.latest)'
        required: true

defaults:
  run:
    shell: bash

permissions:
  contents: write

jobs:
  cut_branch:
    name: "Cut branch and clean up main for dbt-core"
    uses: dbt-labs/actions/.github/workflows/cut-release-branch.yml@main
    with:
      version_to_bump_main: ${{ inputs.version_to_bump_main }}
      new_branch_name: ${{ inputs.new_branch_name }}
      PR_title: "Cleanup main after cutting new ${{ inputs.new_branch_name }} branch"
      PR_body: "All adapter PRs will fail CI until the dbt-core PR has been merged due to release version conflicts."
    secrets:
      FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }}
.github/workflows/generate-cli-api-docs.yml (vendored, deleted; was 165 lines)

# **what?**
# On push, if anything in core/dbt/docs or core/dbt/cli has been
# created or modified, regenerate the CLI API docs using sphinx.

# **why?**
# We watch for changes in core/dbt/cli because the CLI API docs rely on click
# and all supporting flags/params to be generated. We watch for changes in
# core/dbt/docs since any changes to sphinx configuration or any of the
# .rst files there could result in a differently built final index.html file.

# **when?**
# Whenever a change has been pushed to a branch, and only if there is a diff
# between the PR branch and main's core/dbt/cli and/or core/dbt/docs dirs.

# TODO: add bot comment to PR informing contributor that the docs have been committed
# TODO: figure out why github action triggered pushes cause github to fail to report
# the status of jobs

name: Generate CLI API docs

on:
  pull_request:

permissions:
  contents: write
  pull-requests: write

env:
  CLI_DIR: ${{ github.workspace }}/core/dbt/cli
  DOCS_DIR: ${{ github.workspace }}/core/dbt/docs
  DOCS_BUILD_DIR: ${{ github.workspace }}/core/dbt/docs/build

jobs:
  check_gen:
    name: check if generation needed
    runs-on: ubuntu-latest
    if: ${{ github.event.pull_request.head.repo.fork == false }}
    outputs:
      cli_dir_changed: ${{ steps.check_cli.outputs.cli_dir_changed }}
      docs_dir_changed: ${{ steps.check_docs.outputs.docs_dir_changed }}

    steps:
      - name: "[DEBUG] print variables"
        run: |
          echo "env.CLI_DIR: ${{ env.CLI_DIR }}"
          echo "env.DOCS_BUILD_DIR: ${{ env.DOCS_BUILD_DIR }}"
          echo "env.DOCS_DIR: ${{ env.DOCS_DIR }}"

      - name: git checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ github.head_ref }}

      - name: set shas
        id: set_shas
        run: |
          THIS_SHA=$(git rev-parse @)
          LAST_SHA=$(git rev-parse @~1)

          echo "this sha: $THIS_SHA"
          echo "last sha: $LAST_SHA"

          echo "this_sha=$THIS_SHA" >> $GITHUB_OUTPUT
          echo "last_sha=$LAST_SHA" >> $GITHUB_OUTPUT

      - name: check for changes in core/dbt/cli
        id: check_cli
        run: |
          CLI_DIR_CHANGES=$(git diff \
            ${{ steps.set_shas.outputs.last_sha }} \
            ${{ steps.set_shas.outputs.this_sha }} \
            -- ${{ env.CLI_DIR }})

          if [ -n "$CLI_DIR_CHANGES" ]; then
            echo "changes found"
            echo $CLI_DIR_CHANGES
            echo "cli_dir_changed=true" >> $GITHUB_OUTPUT
            exit 0
          fi
          echo "cli_dir_changed=false" >> $GITHUB_OUTPUT
          echo "no changes found"

      - name: check for changes in core/dbt/docs
        id: check_docs
        if: steps.check_cli.outputs.cli_dir_changed == 'false'
        run: |
          DOCS_DIR_CHANGES=$(git diff --name-only \
            ${{ steps.set_shas.outputs.last_sha }} \
            ${{ steps.set_shas.outputs.this_sha }} \
            -- ${{ env.DOCS_DIR }} ':!${{ env.DOCS_BUILD_DIR }}')

          DOCS_BUILD_DIR_CHANGES=$(git diff --name-only \
            ${{ steps.set_shas.outputs.last_sha }} \
            ${{ steps.set_shas.outputs.this_sha }} \
            -- ${{ env.DOCS_BUILD_DIR }})

          if [ -n "$DOCS_DIR_CHANGES" ] && [ -z "$DOCS_BUILD_DIR_CHANGES" ]; then
            echo "changes found"
            echo $DOCS_DIR_CHANGES
            echo "docs_dir_changed=true" >> $GITHUB_OUTPUT
            exit 0
          fi
          echo "docs_dir_changed=false" >> $GITHUB_OUTPUT
          echo "no changes found"

  gen_docs:
    name: generate docs
    runs-on: ubuntu-latest
    needs: [check_gen]
    if: |
      needs.check_gen.outputs.cli_dir_changed == 'true'
      || needs.check_gen.outputs.docs_dir_changed == 'true'

    steps:
      - name: "[DEBUG] print variables"
        run: |
          echo "env.DOCS_DIR: ${{ env.DOCS_DIR }}"
          echo "github head_ref: ${{ github.head_ref }}"

      - name: git checkout
        uses: actions/checkout@v3
        with:
          ref: ${{ github.head_ref }}

      - name: install python
        uses: actions/setup-python@v4.3.0
        with:
          python-version: 3.8

      - name: install dev requirements
        run: |
          python3 -m venv env
          source env/bin/activate
          python -m pip install --upgrade pip
          pip install -r requirements.txt -r dev-requirements.txt

      - name: generate docs
        run: |
          source env/bin/activate
          cd ${{ env.DOCS_DIR }}

          echo "cleaning existing docs"
          make clean

          echo "creating docs"
          make html

      - name: debug
        run: |
          echo ">>>>> status"
          git status
          echo ">>>>> remotes"
          git remote -v
          echo ">>>>> branch"
          git branch -v
          echo ">>>>> log"
          git log --pretty=oneline | head -5

      - name: commit docs
        run: |
          git config user.name 'Github Build Bot'
          git config user.email 'buildbot@fishtownanalytics.com'
          git commit -am "Add generated CLI API docs"
          git push -u origin ${{ github.head_ref }}
.github/workflows/main.yml (vendored)

@@ -42,7 +42,7 @@ jobs:
     steps:
       - name: Check out the repository
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3

       - name: Set up Python
         uses: actions/setup-python@v4.3.0

@@ -53,12 +53,8 @@
       run: |
         python -m pip install --user --upgrade pip
         python -m pip --version
-        python -m pip install pre-commit
-        pre-commit --version
-        python -m pip install mypy==0.942
+        make dev
         mypy --version
-        python -m pip install -r requirements.txt
-        python -m pip install -r dev-requirements.txt
         dbt --version

     - name: Run pre-commit hooks

@@ -81,7 +77,7 @@
     steps:
       - name: Check out the repository
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3

       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v4.3.0

@@ -105,7 +101,7 @@
         CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
         echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT

-      - uses: actions/upload-artifact@v2
+      - uses: actions/upload-artifact@v3
        if: always()
        with:
          name: unit_results_${{ matrix.python-version }}-${{ steps.date.outputs.date }}.csv

@@ -135,10 +131,15 @@
       DBT_TEST_USER_1: dbt_test_user_1
       DBT_TEST_USER_2: dbt_test_user_2
       DBT_TEST_USER_3: dbt_test_user_3
+      DD_CIVISIBILITY_AGENTLESS_ENABLED: true
+      DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
+      DD_SITE: datadoghq.com
+      DD_ENV: ci
+      DD_SERVICE: ${{ github.event.repository.name }}

     steps:
       - name: Check out the repository
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3

       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v4.3.0

@@ -165,7 +166,7 @@
         tox --version

       - name: Run tests
-        run: tox
+        run: tox -- --ddtrace

       - name: Get current date
         if: always()

@@ -174,13 +175,13 @@
         CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
         echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT

-      - uses: actions/upload-artifact@v2
+      - uses: actions/upload-artifact@v3
        if: always()
        with:
          name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ steps.date.outputs.date }}
          path: ./logs

-      - uses: actions/upload-artifact@v2
+      - uses: actions/upload-artifact@v3
        if: always()
        with:
          name: integration_results_${{ matrix.python-version }}_${{ matrix.os }}_${{ steps.date.outputs.date }}.csv

@@ -193,7 +194,7 @@
     steps:
       - name: Check out the repository
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3

       - name: Set up Python
         uses: actions/setup-python@v4.3.0
.github/workflows/nightly-release.yml (vendored, new file, 109 lines)

# **what?**
# Nightly releases to GitHub and PyPI. This workflow produces the following outcome:
# - generate and validate data for the nightly release (commit SHA, version number, release branch);
# - pass data to the release workflow;
# - the nightly release will be pushed to GitHub as a draft release;
# - the nightly build will be pushed to test PyPI;
#
# **why?**
# Ensure an automated and tested release process for nightly builds
#
# **when?**
# This workflow runs on schedule or can be run manually on demand.

name: Nightly Test Release to GitHub and PyPI

on:
  workflow_dispatch: # for manual triggering
  schedule:
    - cron: 0 9 * * *

permissions:
  contents: write # this is the permission that allows creating a new release

defaults:
  run:
    shell: bash

env:
  RELEASE_BRANCH: "main"

jobs:
  aggregate-release-data:
    runs-on: ubuntu-latest

    outputs:
      commit_sha: ${{ steps.resolve-commit-sha.outputs.release_commit }}
      version_number: ${{ steps.nightly-release-version.outputs.number }}
      release_branch: ${{ steps.release-branch.outputs.name }}

    steps:
      - name: "Checkout ${{ github.repository }} Branch ${{ env.RELEASE_BRANCH }}"
        uses: actions/checkout@v3
        with:
          ref: ${{ env.RELEASE_BRANCH }}

      - name: "Resolve Commit To Release"
        id: resolve-commit-sha
        run: |
          commit_sha=$(git rev-parse HEAD)
          echo "release_commit=$commit_sha" >> $GITHUB_OUTPUT

      - name: "Get Current Version Number"
        id: version-number-sources
        run: |
          current_version=`awk -F"current_version = " '{print $2}' .bumpversion.cfg | tr '\n' ' '`
          echo "current_version=$current_version" >> $GITHUB_OUTPUT

      - name: "Audit Version And Parse Into Parts"
        id: semver
        uses: dbt-labs/actions/parse-semver@v1.1.0
        with:
          version: ${{ steps.version-number-sources.outputs.current_version }}

      - name: "Get Current Date"
        id: current-date
        run: echo "date=$(date +'%m%d%Y')" >> $GITHUB_OUTPUT

      - name: "Generate Nightly Release Version Number"
        id: nightly-release-version
        run: |
          number="${{ steps.semver.outputs.version }}.dev${{ steps.current-date.outputs.date }}"
          echo "number=$number" >> $GITHUB_OUTPUT

      - name: "Audit Nightly Release Version And Parse Into Parts"
        uses: dbt-labs/actions/parse-semver@v1.1.0
        with:
          version: ${{ steps.nightly-release-version.outputs.number }}

      - name: "Set Release Branch"
        id: release-branch
        run: |
          echo "name=${{ env.RELEASE_BRANCH }}" >> $GITHUB_OUTPUT

  log-outputs-aggregate-release-data:
    runs-on: ubuntu-latest
    needs: [aggregate-release-data]

    steps:
      - name: "[DEBUG] Log Outputs"
        run: |
          echo commit_sha    : ${{ needs.aggregate-release-data.outputs.commit_sha }}
          echo version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
          echo release_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}

  release-github-pypi:
    needs: [aggregate-release-data]

    uses: ./.github/workflows/release.yml
    with:
      sha: ${{ needs.aggregate-release-data.outputs.commit_sha }}
      target_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}
      version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
      build_script_path: "scripts/build-dist.sh"
      env_setup_script_path: "scripts/env-setup.sh"
      s3_bucket_name: "core-team-artifacts"
      package_test_command: "dbt --version"
      test_run: true
      nightly_release: true
    secrets: inherit
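The nightly version number the workflow assembles is simply the current `.bumpversion.cfg` version plus a `.dev` date stamp. A small sketch of the same arithmetic (the version string is illustrative, and it assumes parse-semver echoes the version back unchanged):

```python
from datetime import datetime

current_version = "1.5.4rc1"               # read from .bumpversion.cfg in the workflow
stamp = datetime.now().strftime("%m%d%Y")  # mirrors `date +'%m%d%Y'` above
nightly = f"{current_version}.dev{stamp}"  # e.g. "1.5.4rc1.dev07272023"
print(nightly)
```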
30
.github/workflows/release-branch-tests.yml
vendored
30
.github/workflows/release-branch-tests.yml
vendored
@@ -28,7 +28,33 @@ on:
|
||||
permissions: read-all
|
||||
|
||||
jobs:
|
||||
fetch-latest-branches:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
outputs:
|
||||
latest-branches: ${{ steps.get-latest-branches.outputs.repo-branches }}
|
||||
|
||||
steps:
|
||||
- name: "Fetch dbt-core Latest Branches"
|
||||
uses: dbt-labs/actions/fetch-repo-branches@v1.1.1
|
||||
id: get-latest-branches
|
||||
with:
|
||||
repo_name: ${{ github.event.repository.name }}
|
||||
organization: "dbt-labs"
|
||||
pat: ${{ secrets.GITHUB_TOKEN }}
|
||||
fetch_protected_branches_only: true
|
||||
regex: "^1.[0-9]+.latest$"
|
||||
perform_match_method: "match"
|
||||
retries: 3
|
||||
|
||||
      - name: "[ANNOTATION] ${{ github.event.repository.name }} - branches to test"
        run: |
          title="${{ github.event.repository.name }} - branches to test"
          message="The workflow will run tests for the following branches of the ${{ github.event.repository.name }} repo: ${{ steps.get-latest-branches.outputs.repo-branches }}"
          echo "::notice $title::$message"

  kick-off-ci:
    needs: [fetch-latest-branches]
    name: Kick-off CI
    runs-on: ubuntu-latest

@@ -39,7 +65,9 @@ jobs:
      max-parallel: 1
      fail-fast: false
      matrix:
        branch: [1.0.latest, 1.1.latest, 1.2.latest, 1.3.latest, main]
        branch: ${{ fromJSON(needs.fetch-latest-branches.outputs.latest-branches) }}
        include:
          - branch: 'main'

    steps:
      - name: Call CI workflow for ${{ matrix.branch }} branch
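The hardcoded branch list is replaced by the JSON array emitted by fetch-latest-branches; `fromJSON` turns that output string into a real list for the matrix. A sketch of the shape involved (branch values illustrative):

```yaml
# if the fetch step outputs the string '["1.4.latest","1.5.latest"]',
# the matrix expands as though it were written:
strategy:
  matrix:
    branch: ["1.4.latest", "1.5.latest"]
    include:
      - branch: 'main'   # 'include' still appends main to the generated list
```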
.github/workflows/release-docker.yml (vendored): 12 lines changed
@@ -36,7 +36,7 @@ jobs:
      latest: ${{ steps.latest.outputs.latest }}
      minor_latest: ${{ steps.latest.outputs.minor_latest }}
    steps:
      - uses: actions/checkout@v1
      - uses: actions/checkout@v3
      - name: Split version
        id: version
        run: |
@@ -60,7 +60,7 @@ jobs:
    needs: [get_version_meta]
    steps:
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
        uses: docker/setup-buildx-action@v2

  build_and_push:
    name: Build images and push to GHCR
@@ -76,14 +76,14 @@ jobs:
          echo "build_arg_value=$BUILD_ARG_VALUE" >> $GITHUB_OUTPUT

      - name: Log in to the GHCR
        uses: docker/login-action@v1
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build and push MAJOR.MINOR.PATCH tag
        uses: docker/build-push-action@v2
        uses: docker/build-push-action@v4
        with:
          file: docker/Dockerfile
          push: True
@@ -94,7 +94,7 @@ jobs:
            ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ github.event.inputs.version_number }}

      - name: Build and push MINOR.latest tag
        uses: docker/build-push-action@v2
        uses: docker/build-push-action@v4
        if: ${{ needs.get_version_meta.outputs.minor_latest == 'True' }}
        with:
          file: docker/Dockerfile
@@ -106,7 +106,7 @@ jobs:
            ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ needs.get_version_meta.outputs.major }}.${{ needs.get_version_meta.outputs.minor }}.latest

      - name: Build and push latest tag
        uses: docker/build-push-action@v2
        uses: docker/build-push-action@v4
        if: ${{ needs.get_version_meta.outputs.latest == 'True' }}
        with:
          file: docker/Dockerfile
.github/workflows/release.yml (vendored): 339 lines changed
@@ -1,24 +1,110 @@
# **what?**
# Take the given commit, run unit tests specifically on that sha, build and
# package it, and then release to GitHub and PyPi with that specific build

# Release workflow provides the following steps:
# - checkout the given commit;
# - validate version in sources and changelog file for given version;
# - bump the version and generate a changelog if needed;
# - merge all changes to the target branch if needed;
# - run unit and integration tests against given commit;
# - build and package that SHA;
# - release it to GitHub and PyPI with that specific build;
#
# **why?**
# Ensure an automated and tested release process

#
# **when?**
# This will only run manually with a given sha and version
# This workflow can be run manually on demand or can be called by other workflows

name: Release to GitHub and PyPi
name: Release to GitHub and PyPI

on:
  workflow_dispatch:
    inputs:
      sha:
        description: 'The last commit sha in the release'
        required: true
        description: "The last commit sha in the release"
        type: string
        required: true
      target_branch:
        description: "The branch to release from"
        type: string
        required: true
      version_number:
        description: 'The release version number (i.e. 1.0.0b1)'
        required: true
        description: "The release version number (i.e. 1.0.0b1)"
        type: string
        required: true
      build_script_path:
        description: "Build script path"
        type: string
        default: "scripts/build-dist.sh"
        required: true
      env_setup_script_path:
        description: "Environment setup script path"
        type: string
        default: "scripts/env-setup.sh"
        required: false
      s3_bucket_name:
        description: "AWS S3 bucket name"
        type: string
        default: "core-team-artifacts"
        required: true
      package_test_command:
        description: "Package test command"
        type: string
        default: "dbt --version"
        required: true
      test_run:
        description: "Test run (Publish release as draft)"
        type: boolean
        default: true
        required: false
      nightly_release:
        description: "Nightly release to dev environment"
        type: boolean
        default: false
        required: false
  workflow_call:
    inputs:
      sha:
        description: "The last commit sha in the release"
        type: string
        required: true
      target_branch:
        description: "The branch to release from"
        type: string
        required: true
      version_number:
        description: "The release version number (i.e. 1.0.0b1)"
        type: string
        required: true
      build_script_path:
        description: "Build script path"
        type: string
        default: "scripts/build-dist.sh"
        required: true
      env_setup_script_path:
        description: "Environment setup script path"
        type: string
        default: "scripts/env-setup.sh"
        required: false
      s3_bucket_name:
        description: "AWS S3 bucket name"
        type: string
        default: "core-team-artifacts"
        required: true
      package_test_command:
        description: "Package test command"
        type: string
        default: "dbt --version"
        required: true
      test_run:
        description: "Test run (Publish release as draft)"
        type: boolean
        default: true
        required: false
      nightly_release:
        description: "Nightly release to dev environment"
        type: boolean
        default: false
        required: false
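With `workflow_call` declared, other workflows can invoke this one as a reusable workflow; the nightly-release job earlier in this diff does exactly that, along the lines of:

```yaml
jobs:
  release-github-pypi:
    uses: ./.github/workflows/release.yml
    with:
      sha: ${{ needs.aggregate-release-data.outputs.commit_sha }}
      target_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}
      version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
      test_run: true
    secrets: inherit   # passes the caller's secrets through to this workflow
```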
permissions:
  contents: write # this is the permission that allows creating a new release

@@ -28,175 +114,116 @@ defaults:
    shell: bash

jobs:
  unit:
    name: Unit test

  log-inputs:
    name: Log Inputs
    runs-on: ubuntu-latest

    env:
      TOXENV: "unit"

    steps:
      - name: Check out the repository
        uses: actions/checkout@v2
        with:
          persist-credentials: false
          ref: ${{ github.event.inputs.sha }}

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
      - name: "[DEBUG] Print Variables"
        run: |
          pip install --user --upgrade pip
          pip install tox
          pip --version
          tox --version
          echo The last commit sha in the release: ${{ inputs.sha }}
          echo The branch to release from: ${{ inputs.target_branch }}
          echo The release version number: ${{ inputs.version_number }}
          echo Build script path: ${{ inputs.build_script_path }}
          echo Environment setup script path: ${{ inputs.env_setup_script_path }}
          echo AWS S3 bucket name: ${{ inputs.s3_bucket_name }}
          echo Package test command: ${{ inputs.package_test_command }}
          echo Test run: ${{ inputs.test_run }}
          echo Nightly release: ${{ inputs.nightly_release }}

      - name: Run tox
        run: tox

  bump-version-generate-changelog:
    name: Bump package version, Generate changelog

  build:
    name: build packages
    uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@main

    with:
      sha: ${{ inputs.sha }}
      version_number: ${{ inputs.version_number }}
      target_branch: ${{ inputs.target_branch }}
      env_setup_script_path: ${{ inputs.env_setup_script_path }}
      test_run: ${{ inputs.test_run }}
      nightly_release: ${{ inputs.nightly_release }}

    secrets: inherit

  log-outputs-bump-version-generate-changelog:
    name: "[Log output] Bump package version, Generate changelog"
    if: ${{ !failure() && !cancelled() }}

    needs: [bump-version-generate-changelog]

    runs-on: ubuntu-latest

    steps:
      - name: Check out the repository
        uses: actions/checkout@v2
        with:
          persist-credentials: false
          ref: ${{ github.event.inputs.sha }}

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
      - name: Print variables
        run: |
          pip install --user --upgrade pip
          pip install --upgrade setuptools wheel twine check-wheel-contents
          pip --version
          echo Final SHA : ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
          echo Changelog path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}

      - name: Build distributions
        run: ./scripts/build-dist.sh

  build-test-package:
    name: Build, Test, Package
    if: ${{ !failure() && !cancelled() }}
    needs: [bump-version-generate-changelog]

      - name: Show distributions
        run: ls -lh dist/

    uses: dbt-labs/dbt-release/.github/workflows/build.yml@main

      - name: Check distribution descriptions
        run: |
          twine check dist/*

    with:
      sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
      version_number: ${{ inputs.version_number }}
      changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
      build_script_path: ${{ inputs.build_script_path }}
      s3_bucket_name: ${{ inputs.s3_bucket_name }}
      package_test_command: ${{ inputs.package_test_command }}
      test_run: ${{ inputs.test_run }}
      nightly_release: ${{ inputs.nightly_release }}

      - name: Check wheel contents
        run: |
          check-wheel-contents dist/*.whl --ignore W007,W008

      - uses: actions/upload-artifact@v2
        with:
          name: dist
          path: |
            dist/
            !dist/dbt-${{github.event.inputs.version_number}}.tar.gz

  test-build:
    name: verify packages

    needs: [build, unit]

    runs-on: ubuntu-latest

    steps:
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
        run: |
          pip install --user --upgrade pip
          pip install --upgrade wheel
          pip --version

      - uses: actions/download-artifact@v2
        with:
          name: dist
          path: dist/

      - name: Show distributions
        run: ls -lh dist/

      - name: Install wheel distributions
        run: |
          find ./dist/*.whl -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/

      - name: Check wheel distributions
        run: |
          dbt --version

      - name: Install source distributions
        run: |
          find ./dist/*.gz -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/

      - name: Check source distributions
        run: |
          dbt --version

    secrets:
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

  github-release:
    name: GitHub Release
    if: ${{ !failure() && !cancelled() }}

    needs: test-build
    needs: [bump-version-generate-changelog, build-test-package]

    runs-on: ubuntu-latest
    uses: dbt-labs/dbt-release/.github/workflows/github-release.yml@main

    steps:
      - uses: actions/download-artifact@v2
        with:
          name: dist
          path: '.'

      # Need to set an output variable because env variables can't be taken as input
      # This is needed for the next step with releasing to GitHub
      - name: Find release type
        id: release_type
        env:
          IS_PRERELEASE: ${{ contains(github.event.inputs.version_number, 'rc') || contains(github.event.inputs.version_number, 'b') }}
        run: |
          echo "isPrerelease=$IS_PRERELEASE" >> $GITHUB_OUTPUT
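The old job classified prereleases purely by substring: any version containing 'rc' or 'b' was marked as a prerelease. In shell terms (version strings illustrative):

```bash
for v in 1.5.4 1.5.4rc1 1.5.0b2; do
  # mirrors contains(version_number, 'rc') || contains(version_number, 'b')
  if [[ "$v" == *rc* || "$v" == *b* ]]; then
    echo "$v: prerelease"
  else
    echo "$v: final release"
  fi
done
# -> 1.5.4: final release; 1.5.4rc1 and 1.5.0b2: prerelease
```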
      - name: Creating GitHub Release
        uses: softprops/action-gh-release@v1
        with:
          name: dbt-core v${{github.event.inputs.version_number}}
          tag_name: v${{github.event.inputs.version_number}}
          prerelease: ${{ steps.release_type.outputs.isPrerelease }}
          target_commitish: ${{github.event.inputs.sha}}
          body: |
            [Release notes](https://github.com/dbt-labs/dbt-core/blob/main/CHANGELOG.md)
          files: |
            dbt_postgres-${{github.event.inputs.version_number}}-py3-none-any.whl
            dbt_core-${{github.event.inputs.version_number}}-py3-none-any.whl
            dbt-postgres-${{github.event.inputs.version_number}}.tar.gz
            dbt-core-${{github.event.inputs.version_number}}.tar.gz

    with:
      sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
      version_number: ${{ inputs.version_number }}
      changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
      test_run: ${{ inputs.test_run }}

  pypi-release:
    name: Pypi release
    name: PyPI Release

    runs-on: ubuntu-latest
    needs: [github-release]

    needs: github-release
    uses: dbt-labs/dbt-release/.github/workflows/pypi-release.yml@main

    environment: PypiProd
    steps:
      - uses: actions/download-artifact@v2
        with:
          name: dist
          path: 'dist'

    with:
      version_number: ${{ inputs.version_number }}
      test_run: ${{ inputs.test_run }}

      - name: Publish distribution to PyPI
        uses: pypa/gh-action-pypi-publish@v1.4.2
        with:
          password: ${{ secrets.PYPI_API_TOKEN }}

    secrets:
      PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
      TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }}

  slack-notification:
    name: Slack Notification
    if: ${{ failure() && (!inputs.test_run || inputs.nightly_release) }}

    needs:
      [
        bump-version-generate-changelog,
        build-test-package,
        github-release,
        pypi-release,
      ]

    uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main
    with:
      status: "failure"

    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}
@@ -30,6 +30,8 @@ jobs:
      LOG_DIR: "/home/runner/work/dbt-core/dbt-core/logs"
      # tells integration tests to output into json format
      DBT_LOG_FORMAT: "json"
      # tell eventmgr to convert logging events into bytes
      DBT_TEST_BINARY_SERIALIZATION: "true"
      # Additional test users
      DBT_TEST_USER_1: dbt_test_user_1
      DBT_TEST_USER_2: dbt_test_user_2
@@ -37,12 +39,12 @@ jobs:

    steps:
      - name: checkout dev
        uses: actions/checkout@v2
        uses: actions/checkout@v3
        with:
          persist-credentials: false

      - name: Setup Python
        uses: actions/setup-python@v2.2.2
        uses: actions/setup-python@v4
        with:
          python-version: "3.8"
.github/workflows/test-repeater.yml (vendored, new file): 155 lines
@@ -0,0 +1,155 @@
# **what?**
# This workflow runs the test(s) at the given input path a set number of times, to determine whether they are flaky. You can test with any supported OS/Python combination.
# Runs are batched into 10 batches to get through more test iterations faster.

# **why?**
# To check whether a test is flaky, and whether a previously flaky test has been fixed. This allows easy testing across supported Python versions and OS combinations.

# **when?**
# This is triggered manually from dbt-core.

name: Flaky Tester

on:
  workflow_dispatch:
    inputs:
      branch:
        description: 'Branch to check out'
        type: string
        required: true
        default: 'main'
      test_path:
        description: 'Path to single test to run (ex: tests/functional/retry/test_retry.py::TestRetry::test_fail_fast)'
        type: string
        required: true
        default: 'tests/functional/...'
      python_version:
        description: 'Version of Python to Test Against'
        type: choice
        options:
          - '3.8'
          - '3.9'
          - '3.10'
          - '3.11'
      os:
        description: 'OS to run test in'
        type: choice
        options:
          - 'ubuntu-latest'
          - 'macos-latest'
          - 'windows-latest'
      num_runs_per_batch:
        description: 'Max number of times to run the test per batch. We always run 10 batches.'
        type: number
        required: true
        default: '50'
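Since the workflow only declares a `workflow_dispatch` trigger, it can be started from the GitHub UI or, assuming the GitHub CLI is available, roughly like this (the test path and values are just examples):

```bash
gh workflow run test-repeater.yml \
  --repo dbt-labs/dbt-core \
  -f branch=main \
  -f test_path="tests/functional/retry/test_retry.py::TestRetry::test_fail_fast" \
  -f python_version=3.9 \
  -f os=ubuntu-latest \
  -f num_runs_per_batch=50
```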
permissions: read-all

defaults:
  run:
    shell: bash

jobs:
  debug:
    runs-on: ubuntu-latest
    steps:
      - name: "[DEBUG] Output Inputs"
        run: |
          echo "Branch: ${{ inputs.branch }}"
          echo "test_path: ${{ inputs.test_path }}"
          echo "python_version: ${{ inputs.python_version }}"
          echo "os: ${{ inputs.os }}"
          echo "num_runs_per_batch: ${{ inputs.num_runs_per_batch }}"

  pytest:
    runs-on: ${{ inputs.os }}
    strategy:
      # run all batches, even if one fails. This informs how flaky the test may be.
      fail-fast: false
      # using a matrix to speed up the jobs since the matrix will run in parallel when runners are available
      matrix:
        batch: ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10"]
    env:
      PYTEST_ADDOPTS: "-v --color=yes -n4 --csv integration_results.csv"
      DBT_TEST_USER_1: dbt_test_user_1
      DBT_TEST_USER_2: dbt_test_user_2
      DBT_TEST_USER_3: dbt_test_user_3
      DD_CIVISIBILITY_AGENTLESS_ENABLED: true
      DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
      DD_SITE: datadoghq.com
      DD_ENV: ci
      DD_SERVICE: ${{ github.event.repository.name }}

    steps:
      - name: "Checkout code"
        uses: actions/checkout@v3
        with:
          ref: ${{ inputs.branch }}

      - name: "Setup Python"
        uses: actions/setup-python@v4
        with:
          python-version: "${{ inputs.python_version }}"

      - name: "Setup Dev Environment"
        run: make dev

      - name: "Set up postgres (linux)"
        if: inputs.os == 'ubuntu-latest'
        run: make setup-db

      # mac and windows don't use make due to limitations with docker with those runners in GitHub
      - name: "Set up postgres (macos)"
        if: inputs.os == 'macos-latest'
        uses: ./.github/actions/setup-postgres-macos

      - name: "Set up postgres (windows)"
        if: inputs.os == 'windows-latest'
        uses: ./.github/actions/setup-postgres-windows

      - name: "Test Command"
        id: command
        run: |
          test_command="python -m pytest ${{ inputs.test_path }}"
          echo "test_command=$test_command" >> $GITHUB_OUTPUT

      - name: "Run test ${{ inputs.num_runs_per_batch }} times"
        id: pytest
        run: |
          set +e
          # initialize the per-batch counters explicitly
          success=0
          failure=0
          for ((i=1; i<=${{ inputs.num_runs_per_batch }}; i++))
          do
            echo "Running pytest iteration $i..."
            python -m pytest --ddtrace ${{ inputs.test_path }}
            exit_code=$?

            if [[ $exit_code -eq 0 ]]; then
              success=$((success + 1))
              echo "Iteration $i: Success"
            else
              failure=$((failure + 1))
              echo "Iteration $i: Failure"
            fi

            echo
            echo "==========================="
            echo "Successful runs: $success"
            echo "Failed runs: $failure"
            echo "==========================="
            echo
          done

          # expose both counters so the summary step below can read them
          echo "success=$success" >> $GITHUB_OUTPUT
          echo "failure=$failure" >> $GITHUB_OUTPUT
      - name: "Success and Failure Summary: ${{ inputs.os }}/Python ${{ inputs.python_version }}"
        run: |
          echo "Batch: ${{ matrix.batch }}"
          echo "Successful runs: ${{ steps.pytest.outputs.success }}"
          echo "Failed runs: ${{ steps.pytest.outputs.failure }}"

      - name: "Error for Failures"
        if: ${{ steps.pytest.outputs.failure }}
        run: |
          echo "Batch ${{ matrix.batch }} failed ${{ steps.pytest.outputs.failure }} of ${{ inputs.num_runs_per_batch }} tests"
          exit 1
.github/workflows/version-bump.yml (vendored): 107 lines changed
@@ -20,106 +20,9 @@ on:
      description: 'The version number to bump to (ex. 1.2.0, 1.3.0b1)'
      required: true

permissions:
  contents: write
  pull-requests: write

jobs:
  bump:
    runs-on: ubuntu-latest
    steps:
      - name: "[DEBUG] Print Variables"
        run: |
          echo "all variables defined as inputs"
          echo The version_number: ${{ github.event.inputs.version_number }}

      - name: Check out the repository
        uses: actions/checkout@v2

      - uses: actions/setup-python@v2
        with:
          python-version: "3.8"

      - name: Install python dependencies
        run: |
          python3 -m venv env
          source env/bin/activate
          pip install --upgrade pip

      - name: Add Homebrew to PATH
        run: |
          echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH

      - name: Install Homebrew packages
        run: |
          brew install pre-commit
          brew tap miniscruff/changie https://github.com/miniscruff/changie
          brew install changie

      - name: Audit Version and Parse Into Parts
        id: semver
        uses: dbt-labs/actions/parse-semver@v1
        with:
          version: ${{ github.event.inputs.version_number }}

      - name: Set branch value
        id: variables
        run: |
          echo "BRANCH_NAME=prep-release/${{ github.event.inputs.version_number }}_$GITHUB_RUN_ID" >> $GITHUB_OUTPUT

      - name: Create PR branch
        run: |
          git checkout -b ${{ steps.variables.outputs.BRANCH_NAME }}
          git push origin ${{ steps.variables.outputs.BRANCH_NAME }}
          git branch --set-upstream-to=origin/${{ steps.variables.outputs.BRANCH_NAME }} ${{ steps.variables.outputs.BRANCH_NAME }}

      - name: Bump version
        run: |
          source env/bin/activate
          pip install -r dev-requirements.txt
          env/bin/bumpversion --allow-dirty --new-version ${{ github.event.inputs.version_number }} major
          git status

      - name: Run changie
        run: |
          if [[ ${{ steps.semver.outputs.is-pre-release }} -eq 1 ]]
          then
            changie batch ${{ steps.semver.outputs.base-version }} --move-dir '${{ steps.semver.outputs.base-version }}' --prerelease '${{ steps.semver.outputs.pre-release }}'
          else
            changie batch ${{ steps.semver.outputs.base-version }} --include '${{ steps.semver.outputs.base-version }}' --remove-prereleases
          fi
          changie merge
          git status
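To make the branching concrete: for a prerelease input like `1.6.0b1`, parse-semver would report base-version `1.6.0` and pre-release `b1` (values illustrative), so the two paths expand roughly to:

```bash
# prerelease input (is-pre-release == 1):
changie batch 1.6.0 --move-dir '1.6.0' --prerelease 'b1'
# final-release input such as 1.6.0 (is-pre-release == 0):
changie batch 1.6.0 --include '1.6.0' --remove-prereleases
# either way, fold the batched notes into CHANGELOG.md afterwards:
changie merge
```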
      # this step will fail on whitespace errors but also correct them
      - name: Remove trailing whitespace
        continue-on-error: true
        run: |
          pre-commit run trailing-whitespace --files .bumpversion.cfg CHANGELOG.md .changes/*
          git status

      # this step will fail on newline errors but also correct them
      - name: Removing extra newlines
        continue-on-error: true
        run: |
          pre-commit run end-of-file-fixer --files .bumpversion.cfg CHANGELOG.md .changes/*
          git status

      - name: Commit version bump to branch
        uses: EndBug/add-and-commit@v7
        with:
          author_name: 'Github Build Bot'
          author_email: 'buildbot@fishtownanalytics.com'
          message: 'Bumping version to ${{ github.event.inputs.version_number }} and generate CHANGELOG'
          branch: '${{ steps.variables.outputs.BRANCH_NAME }}'
          push: 'origin origin/${{ steps.variables.outputs.BRANCH_NAME }}'

      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v3
        with:
          author: 'Github Build Bot <buildbot@fishtownanalytics.com>'
          base: ${{github.ref}}
          title: 'Bumping version to ${{ github.event.inputs.version_number }} and generate changelog'
          branch: '${{ steps.variables.outputs.BRANCH_NAME }}'
          labels: |
            Skip Changelog

  version_bump_and_changie:
    uses: dbt-labs/actions/.github/workflows/version-bump.yml@main
    with:
      version_number: ${{ inputs.version_number }}
    secrets: inherit # ok since what we are calling is internally maintained
.gitignore (vendored): 1 line changed
@@ -51,6 +51,7 @@ coverage.xml
*,cover
.hypothesis/
test.env
makefile.test.env
*.pytest_cache/
@@ -1,8 +1,7 @@
# Configuration for pre-commit hooks (see https://pre-commit.com/).
# Eventually the hooks described here will be run as tests before merging each PR.

# TODO: remove global exclusion of tests when testing overhaul is complete
exclude: ^(test/|core/dbt/docs/build/)
exclude: ^(core/dbt/docs/build/|core/dbt/events/types_pb2.py)

# Force all unspecified python hooks to run python 3.8
default_language_version:
@@ -38,7 +37,7 @@ repos:
        alias: flake8-check
        stages: [manual]
  - repo: https://github.com/pre-commit/mirrors-mypy
    rev: v0.942
    rev: v0.981
    hooks:
      - id: mypy
        # N.B.: Mypy is... a bit fragile.
CHANGELOG.md: 273 lines changed
@@ -5,6 +5,279 @@
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)

## dbt-core 1.5.4-rc1 - July 27, 2023

### Fixes

- Ensure `warn_error_options` get serialized in `invocation_args_dict` ([#7694](https://github.com/dbt-labs/dbt-core/issues/7694))
- Improve handling of CTE injection with ephemeral models ([#8213](https://github.com/dbt-labs/dbt-core/issues/8213))

### Under the Hood

- Refactor flaky test pp_versioned_models ([#7781](https://github.com/dbt-labs/dbt-core/issues/7781))


## dbt-core 1.5.3 - July 17, 2023

### Fixes

- Add --target-path to more CLI subcommands ([#7646](https://github.com/dbt-labs/dbt-core/issues/7646))
- Remove limitation on use of sqlparse 0.4.4 ([#7515](https://github.com/dbt-labs/dbt-core/issues/7515))
- Move project_root contextvar into events.contextvars ([#7937](https://github.com/dbt-labs/dbt-core/issues/7937))
- Inline query emit proper error message ([#7940](https://github.com/dbt-labs/dbt-core/issues/7940))
- Allow on_schema_change = fail for contracted incremental models ([#7975](https://github.com/dbt-labs/dbt-core/issues/7975))
- Nicer error message if model with enforced contract is missing 'columns' specification ([#7943](https://github.com/dbt-labs/dbt-core/issues/7943))
- Detect breaking contract changes to versioned models ([#8030](https://github.com/dbt-labs/dbt-core/issues/8030))

### Dependencies

- Pin click>=7.0,<8.1.4 ([#8050](https://github.com/dbt-labs/dbt-core/pull/8050))

### Contributors
- [@dwreeves](https://github.com/dwreeves) ([#7646](https://github.com/dbt-labs/dbt-core/issues/7646))

## dbt-core 1.5.2 - June 22, 2023

### Features

- add access selection syntax ([#7738](https://github.com/dbt-labs/dbt-core/issues/7738))
- Add AdapterRegistered event log message ([#7038](https://github.com/dbt-labs/dbt-core/issues/7038))

### Fixes

- Add --target-path to dbt snapshot command. ([#7418](https://github.com/dbt-labs/dbt-core/issues/7418))
- Constraint rendering fixes: wrap check expression in parentheses, foreign key 'references', support expression in all constraint types ([#7417](https://github.com/dbt-labs/dbt-core/issues/7417), [#7480](https://github.com/dbt-labs/dbt-core/issues/7480), [#7416](https://github.com/dbt-labs/dbt-core/issues/7416))
- Fix warning messages for deprecated dbt_project.yml configs ([#7424](https://github.com/dbt-labs/dbt-core/issues/7424))
- Respect column 'quote' config in model contracts ([#7370](https://github.com/dbt-labs/dbt-core/issues/7370))
- Improve warnings for constraints and materialization types ([#7335](https://github.com/dbt-labs/dbt-core/issues/7335))
- Incorrect paths used for "target" and "state" directories ([#7465](https://github.com/dbt-labs/dbt-core/issues/7465))
- Using version 0 works when resolving single model ([#7372](https://github.com/dbt-labs/dbt-core/issues/7372))
- Fix empty --warn-error-options error message ([#7730](https://github.com/dbt-labs/dbt-core/issues/7730))
- send sql header on contract enforcement ([#7714](https://github.com/dbt-labs/dbt-core/issues/7714))
- Fix path selector when using project-dir ([#7819](https://github.com/dbt-labs/dbt-core/issues/7819))
- Allow dbt show --inline preview of private models ([#7837](https://github.com/dbt-labs/dbt-core/issues/7837))
- Updating this error message to point to the correct URL ([#7789](https://github.com/dbt-labs/dbt-core/issues/7789))

### Contributors
- [@dave-connors-3](https://github.com/dave-connors-3) ([#7738](https://github.com/dbt-labs/dbt-core/issues/7738))
- [@dwreeves](https://github.com/dwreeves) ([#7418](https://github.com/dbt-labs/dbt-core/issues/7418))
- [@mirnawong1](https://github.com/mirnawong1) ([#7789](https://github.com/dbt-labs/dbt-core/issues/7789))

## dbt-core 1.5.1 - May 30, 2023

### Fixes

- fix typo in unpacking statically parsed ref ([#7364](https://github.com/dbt-labs/dbt-core/issues/7364))
- Fix inverted `--print/--no-print` flag ([#7517](https://github.com/dbt-labs/dbt-core/issues/7517))
- Back-compat for previous return type of 'collect_freshness' macro ([#7489](https://github.com/dbt-labs/dbt-core/issues/7489))
- print model version in dbt show if specified ([#7407](https://github.com/dbt-labs/dbt-core/issues/7407))
- Allow missing `profiles.yml` for `dbt deps` and `dbt init` ([#7511](https://github.com/dbt-labs/dbt-core/issues/7511))
- Do not rewrite manifest.json during 'docs serve' command ([#7553](https://github.com/dbt-labs/dbt-core/issues/7553))
- Pin protobuf to greater than 4.0.0 ([#7565](https://github.com/dbt-labs/dbt-core/issues/7565))
- Throw error for duplicated versioned and unversioned models ([#7487](https://github.com/dbt-labs/dbt-core/issues/7487))
- Fix: Relative project paths weren't working with deps ([#7491](https://github.com/dbt-labs/dbt-core/issues/7491))
- Fall back if rendering the password field fails. ([#7629](https://github.com/dbt-labs/dbt-core/issues/7629))
- Stringify flag paths for Jinja context ([#7495](https://github.com/dbt-labs/dbt-core/issues/7495))

### Under the Hood

- Remove legacy file logger code ([#NA](https://github.com/dbt-labs/dbt-core/issues/NA))

### Contributors
- [@iknox-fa](https://github.com/iknox-fa) ([#7491](https://github.com/dbt-labs/dbt-core/issues/7491), [#NA](https://github.com/dbt-labs/dbt-core/issues/NA))
- [@thomasgjerdekog](https://github.com/thomasgjerdekog) ([#7517](https://github.com/dbt-labs/dbt-core/issues/7517))

## dbt-core 1.5.0 - April 27, 2023

### Breaking Changes

- Allow `--select` and `--exclude` multiple times ([#7158](https://github.com/dbt-labs/dbt-core/issues/7158))
- Specifying "log-path" and "target-path" in "dbt_project.yml" is deprecated. This functionality will be removed in a future version of dbt-core. If you need to specify a custom path for logs or artifacts, please set via CLI flag or env var instead. ([#6882](https://github.com/dbt-labs/dbt-core/issues/6882))
- Remove exception functions marked as deprecated in 1.4 release ([#6578](https://github.com/dbt-labs/dbt-core/issues/6578))

### Features

- Data type constraints are now native to SQL table materializations. Enforce columns are specific data types and not null depending on database functionality. ([#6079](https://github.com/dbt-labs/dbt-core/issues/6079))
- Have dbt debug spit out structured json logs with flags enabled. ([#5353](https://github.com/dbt-labs/dbt-core/issues/5353))
- ✨ add unix-style wildcard selector method ([#6598](https://github.com/dbt-labs/dbt-core/issues/6598))
- add adapter_response to dbt test and freshness result ([#2964](https://github.com/dbt-labs/dbt-core/issues/2964))
- add support for DBT_PROJECT_DIR env var ([#6078](https://github.com/dbt-labs/dbt-core/issues/6078))
- Improve error message for packages missing `dbt_project.yml` ([#6663](https://github.com/dbt-labs/dbt-core/issues/6663))
- Make project version optional ([#6603](https://github.com/dbt-labs/dbt-core/issues/6603))
- Adjust makefile to have clearer instructions for CI env var changes. ([#6689](https://github.com/dbt-labs/dbt-core/issues/6689))
- Stand-alone Python module for PostgresColumn ([#6772](https://github.com/dbt-labs/dbt-core/issues/6772))
- Enable diff based partial parsing ([#6592](https://github.com/dbt-labs/dbt-core/issues/6592))
- Exposure owner requires one of name or email keys, and accepts additional arbitrary keys ([#6833](https://github.com/dbt-labs/dbt-core/issues/6833))
- Parse 'group' resource ([#6921](https://github.com/dbt-labs/dbt-core/issues/6921))
- parse 'group' config on groupable nodes ([#6823](https://github.com/dbt-labs/dbt-core/issues/6823))
- Implemented new log cli parameters for finer-grained control. ([#6639](https://github.com/dbt-labs/dbt-core/issues/6639))
- Add access attribute to parsed nodes ([#6824](https://github.com/dbt-labs/dbt-core/issues/6824))
- Enforce contracts on models materialized as tables, views, and incremental ([#6751](https://github.com/dbt-labs/dbt-core/issues/6751), [#7034](https://github.com/dbt-labs/dbt-core/issues/7034), [#6756](https://github.com/dbt-labs/dbt-core/issues/6756), [#7154](https://github.com/dbt-labs/dbt-core/issues/7154))
- Add ability to select by group resource ([#6825](https://github.com/dbt-labs/dbt-core/issues/6825))
- Disallow refing private model across groups ([#6826](https://github.com/dbt-labs/dbt-core/issues/6826))
- make version configs optional ([#7054](https://github.com/dbt-labs/dbt-core/issues/7054))
- [CT-1584] New top level commands: interactive compile ([#6358](https://github.com/dbt-labs/dbt-core/issues/6358))
- Make model contracts agnostic to ordering ([#6975](https://github.com/dbt-labs/dbt-core/issues/6975), [#7064](https://github.com/dbt-labs/dbt-core/issues/7064))
- Unified constraints and check_constraints properties for columns and models ([#7066](https://github.com/dbt-labs/dbt-core/issues/7066))
- Switch from betterproto to google protobuf and enable more flexible meta dictionary in logs ([#6832](https://github.com/dbt-labs/dbt-core/issues/6832))
- Ignore duplicate edges in subgraph to speed up dbt build ([#7191](https://github.com/dbt-labs/dbt-core/issues/7191))
- Support setting of callbacks for programmatic uses of `dbtRunner` ([#6763](https://github.com/dbt-labs/dbt-core/issues/6763))
- Detect breaking changes to contracts in state:modified check ([#6869](https://github.com/dbt-labs/dbt-core/issues/6869))
- New command: dbt show ([#7207](https://github.com/dbt-labs/dbt-core/issues/7207), [#7179](https://github.com/dbt-labs/dbt-core/issues/7179), [#6359](https://github.com/dbt-labs/dbt-core/issues/6359))
- Added prettier printing to ContractError class ([#7209](https://github.com/dbt-labs/dbt-core/issues/7209))
- Add support for model-level constraints ([#6754](https://github.com/dbt-labs/dbt-core/issues/6754))
- model versions ([#7263](https://github.com/dbt-labs/dbt-core/issues/7263))
- Add relation info (database, schema, alias) to node_info dictionary in structured logging ([#6724](https://github.com/dbt-labs/dbt-core/issues/6724))
- Add --no-populate-cache to optionally skip relation cache population ([#1751](https://github.com/dbt-labs/dbt-core/issues/1751))
- select resources by patch path ([#7315](https://github.com/dbt-labs/dbt-core/issues/7315))
- Add version selector method ([#7199](https://github.com/dbt-labs/dbt-core/issues/7199))

### Fixes

- Remove trailing slashes from source paths (#6102) ([#6102](https://github.com/dbt-labs/dbt-core/issues/6102))
- add merge_exclude_columns adapter tests ([#6699](https://github.com/dbt-labs/dbt-core/issues/6699))
- Include adapter_response in NodeFinished run_result log event ([#6703](https://github.com/dbt-labs/dbt-core/issues/6703))
- Sort cli vars before hashing for partial parsing ([#6710](https://github.com/dbt-labs/dbt-core/issues/6710))
- [Regression] exposure_content referenced incorrectly ([#6738](https://github.com/dbt-labs/dbt-core/issues/6738))
- Snapshot strategies: add a newline for subquery ([#6781](https://github.com/dbt-labs/dbt-core/issues/6781))
- Remove pin on packaging and stop using it for prerelease comparisons ([#6834](https://github.com/dbt-labs/dbt-core/issues/6834))
- Readd depends_on.macros to SeedNode, to support seeds with hooks calling macros ([#6806](https://github.com/dbt-labs/dbt-core/issues/6806))
- Fix regression of --quiet cli parameter behavior ([#6749](https://github.com/dbt-labs/dbt-core/issues/6749))
- Add double type to list of float column types for the column class ([#6876](https://github.com/dbt-labs/dbt-core/issues/6876))
- Ensure results from hooks contain nodes when processing them ([#6796](https://github.com/dbt-labs/dbt-core/issues/6796))
- Always flush stdout after logging ([#6901](https://github.com/dbt-labs/dbt-core/issues/6901))
- Reapply logging fixes which were accidentally reverted ([#6936](https://github.com/dbt-labs/dbt-core/issues/6936))
- Set relation_name in test nodes at compile time ([#6930](https://github.com/dbt-labs/dbt-core/issues/6930))
- Readd initialization events, --log-cache-events in new CLI ([#6933](https://github.com/dbt-labs/dbt-core/issues/6933))
- Fix previous state tests and disabled exposures, metrics ([#6752](https://github.com/dbt-labs/dbt-core/issues/6752), [#6753](https://github.com/dbt-labs/dbt-core/issues/6753))
- Make use of hashlib.md5() FIPS compliant ([#6900](https://github.com/dbt-labs/dbt-core/issues/6900))
- add timeout for dbt --version command ([#6992](https://github.com/dbt-labs/dbt-core/issues/6992))
- Fix compilation logic for ephemeral nodes ([#6885](https://github.com/dbt-labs/dbt-core/issues/6885))
- Fix semver comparison logic by ensuring numeric values ([#7039](https://github.com/dbt-labs/dbt-core/issues/7039))
- add pytz dependency ([#7077](https://github.com/dbt-labs/dbt-core/issues/7077))
- allow adapters to change model name resolution in py models ([#7114](https://github.com/dbt-labs/dbt-core/issues/7114))
- Add exception handling in postflight decorator to address exit codes ([#7010](https://github.com/dbt-labs/dbt-core/issues/7010))
- Recreates missing tracking events ([#6097](https://github.com/dbt-labs/dbt-core/issues/6097), [#6098](https://github.com/dbt-labs/dbt-core/issues/6098))
- Fix partial parsing error due to not requiring "version" ([#7236](https://github.com/dbt-labs/dbt-core/issues/7236))
- Handle internal exceptions ([#7118](https://github.com/dbt-labs/dbt-core/issues/7118))
- Improved failed event serialization handling and associated tests ([#7113](https://github.com/dbt-labs/dbt-core/issues/7113), [#7108](https://github.com/dbt-labs/dbt-core/issues/7108), [#6568](https://github.com/dbt-labs/dbt-core/issues/6568))
- Fix handling of artifacts in read_and_check_versions ([#7252](https://github.com/dbt-labs/dbt-core/issues/7252))
- Stringify datetimes in logging for prettier messages ([#7255](https://github.com/dbt-labs/dbt-core/issues/7255))
- avoid dbtRunner default callbacks being shared across instances ([#7278](https://github.com/dbt-labs/dbt-core/issues/7278))
- Ensure same_contract is called for state:modified ([#7282](https://github.com/dbt-labs/dbt-core/issues/7282))
- Avoid revoking grants for views when `copy_grants=true` ([#7280](https://github.com/dbt-labs/dbt-core/issues/7280))
- Duplicated flags now throw errors instead of being overridden by parent-level flag ([#6913](https://github.com/dbt-labs/dbt-core/issues/6913))
- Ensure that invocation_id changes between programmatic invocations. ([#7197](https://github.com/dbt-labs/dbt-core/issues/7197))
- Adding a new column is not a breaking contract change ([#7332](https://github.com/dbt-labs/dbt-core/issues/7332))
- fix versioned model selection in subdirectories ([#7348](https://github.com/dbt-labs/dbt-core/issues/7348))
- safe version attribute access in _check_resource_uniqueness ([#7375](https://github.com/dbt-labs/dbt-core/issues/7375))
- Fix dbt command missing target-path param ([#7411](https://github.com/dbt-labs/dbt-core/issues/7411))
- Fix v0 ref resolution ([#7408](https://github.com/dbt-labs/dbt-core/issues/7408))
- fix groupable node partial parsing, raise DbtReferenceError at runtime for safety ([#7437](https://github.com/dbt-labs/dbt-core/issues/7437))
- Fix partial parsing of latest_version changes for downstream references ([#7369](https://github.com/dbt-labs/dbt-core/issues/7369))

### Docs

- Improve displayed message under "Arguments" section for argumentless macro ([dbt-docs/#358](https://github.com/dbt-labs/dbt-docs/issues/358))
- update link to installation instructions ([dbt-docs/#None](https://github.com/dbt-labs/dbt-docs/issues/None))
- Fix JSON path to overview docs ([dbt-docs/#366](https://github.com/dbt-labs/dbt-docs/issues/366))
- Searchable column descriptions ([dbt-docs/#140](https://github.com/dbt-labs/dbt-docs/issues/140), [dbt-docs/#322](https://github.com/dbt-labs/dbt-docs/issues/322), [dbt-docs/#369](https://github.com/dbt-labs/dbt-docs/issues/369))
- Add access property to model details ([dbt-docs/#381](https://github.com/dbt-labs/dbt-docs/issues/381))
- Display model owner by name and email ([dbt-docs/#377](https://github.com/dbt-labs/dbt-docs/issues/377))
- Add view of public models sorted by group to left navigation ([dbt-docs/#379](https://github.com/dbt-labs/dbt-docs/issues/379))
- Distinguish node "access" in the DAG with node borders & opacity. ([dbt-docs/#378](https://github.com/dbt-labs/dbt-docs/issues/378))
- Fix JSON path to package overview docs ([dbt-docs/#390](https://github.com/dbt-labs/dbt-docs/issues/390))
- Add selection by group to DAG ([dbt-docs/#380](https://github.com/dbt-labs/dbt-docs/issues/380))
- Add support for model versions ([dbt-docs/#406](https://github.com/dbt-labs/dbt-docs/issues/406))

### Under the Hood

- [CT-921] dbt compile works in click ([#5545](https://github.com/dbt-labs/dbt-core/issues/5545))
- Fix use of ConnectionReused logging event ([#6168](https://github.com/dbt-labs/dbt-core/issues/6168))
- Port docs tests to pytest ([#6573](https://github.com/dbt-labs/dbt-core/issues/6573))
- Update deprecated github action command ([#6153](https://github.com/dbt-labs/dbt-core/issues/6153))
- dbt snapshot works in click ([#5554](https://github.com/dbt-labs/dbt-core/issues/5554))
- dbt list working with click ([#5549](https://github.com/dbt-labs/dbt-core/issues/5549))
- Add dbt run-operation to click CLI ([#5552](https://github.com/dbt-labs/dbt-core/issues/5552))
- dbt build working with new click framework ([#5541](https://github.com/dbt-labs/dbt-core/issues/5541))
- dbt docs generate works with new click framework ([#5543](https://github.com/dbt-labs/dbt-core/issues/5543))
- Replaced the EmptyLine event with a more general Formatting event, and added a Note event. ([#6481](https://github.com/dbt-labs/dbt-core/issues/6481))
- Small optimization on manifest parsing benefitting large DAGs ([#6697](https://github.com/dbt-labs/dbt-core/issues/6697))
- Revised and simplified various structured logging events ([#6664](https://github.com/dbt-labs/dbt-core/issues/6664), [#6665](https://github.com/dbt-labs/dbt-core/issues/6665), [#6666](https://github.com/dbt-labs/dbt-core/issues/6666))
- dbt init works with click ([#5548](https://github.com/dbt-labs/dbt-core/issues/5548))
- [CT-920][CT-1900] Create Click CLI runner and use it to fix dbt docs commands ([#5544](https://github.com/dbt-labs/dbt-core/issues/5544), [#6722](https://github.com/dbt-labs/dbt-core/issues/6722))
- Migrate debug task to click ([#5546](https://github.com/dbt-labs/dbt-core/issues/5546))
- Optimized GraphQueue to remove graph analysis bottleneck in large dags. ([#6759](https://github.com/dbt-labs/dbt-core/issues/6759))
- Implement --version for click cli ([#6757](https://github.com/dbt-labs/dbt-core/issues/6757))
- [CT-1841] Convert custom target test to Pytest ([#6638](https://github.com/dbt-labs/dbt-core/issues/6638))
- Remove BigQuery-specific byte abbreviations ([#6741](https://github.com/dbt-labs/dbt-core/issues/6741))
- warn_error/warn_error_options mutual exclusivity in click ([#6579](https://github.com/dbt-labs/dbt-core/issues/6579))
- Enables the new Click Cli on the commandline! 🚀 ([#6784](https://github.com/dbt-labs/dbt-core/issues/6784))
- Lazily call --version ([#6812](https://github.com/dbt-labs/dbt-core/issues/6812))
- Moving simple_seed to adapter zone to help adapter test conversions ([#CT-1959](https://github.com/dbt-labs/dbt-core/issues/CT-1959))
- flags.THREADS defaults to None ([#6887](https://github.com/dbt-labs/dbt-core/issues/6887))
- Fixing target type exposure error ([#6928](https://github.com/dbt-labs/dbt-core/issues/6928))
- Test binary serialization of logging events ([#6852](https://github.com/dbt-labs/dbt-core/issues/6852))
- Treat contract config as a python object ([#6748](https://github.com/dbt-labs/dbt-core/issues/6748), [#7184](https://github.com/dbt-labs/dbt-core/issues/7184))
- Add deprecation warning for DBT_NO_PRINT ([#6960](https://github.com/dbt-labs/dbt-core/issues/6960))
- Make output_keys click param multi-option instead of a string ([#6676](https://github.com/dbt-labs/dbt-core/issues/6676))
- Remove cli doc generation workflow ([#7088](https://github.com/dbt-labs/dbt-core/issues/7088))
- Move validation of group earlier ([#7087](https://github.com/dbt-labs/dbt-core/issues/7087))
- Deprecate additional environment variables ([#6903](https://github.com/dbt-labs/dbt-core/issues/6903))
- Add CommandCompleted event, and fire it upon completion of every command ([#6878](https://github.com/dbt-labs/dbt-core/issues/6878))
- Improves build times for common selections by improving subgraph calculation ([#7195](https://github.com/dbt-labs/dbt-core/issues/7195))
- Remove upper pin for hologram/jsonschema ([#6775](https://github.com/dbt-labs/dbt-core/issues/6775))
- Generalize constraint compatibility warnings ([#7067](https://github.com/dbt-labs/dbt-core/issues/7067))
- Add kwargs support to dbtRunner ([#7070](https://github.com/dbt-labs/dbt-core/issues/7070))
- Add unique_id to ShowNode and CompiledNode logging events ([#7305](https://github.com/dbt-labs/dbt-core/issues/7305))
- Prettify message for ListRelations event ([#7310](https://github.com/dbt-labs/dbt-core/issues/7310))
- `Parse` now returns manifest when invoked via dbtRunner ([#6547](https://github.com/dbt-labs/dbt-core/issues/6547))
- Track data about group, access, contract, version usage ([#7170](https://github.com/dbt-labs/dbt-core/issues/7170), [#7171](https://github.com/dbt-labs/dbt-core/issues/7171))
- Update docs link in ContractBreakingChangeError message ([#7366](https://github.com/dbt-labs/dbt-core/issues/7366))
- Update --help text for cache-related parameters ([#7381](https://github.com/dbt-labs/dbt-core/issues/7381))
- Small UX improvements to model versions: Support defining latest_version in unsuffixed file by default. Notify on unpinned ref when a prerelease version is available. ([#7443](https://github.com/dbt-labs/dbt-core/issues/7443))

### Dependencies

- Update pathspec requirement from <0.11,>=0.9 to >=0.9,<0.12 in /core ([#6737](https://github.com/dbt-labs/dbt-core/pull/6737))
- Bump ubuntu from 22.04 to 23.04 ([#6865](https://github.com/dbt-labs/dbt-core/pull/6865))
- Revert hoisting dbt.cli.main into the dbt.name namespace ([#](https://github.com/dbt-labs/dbt-core/pull/))
- Bump python from 3.11.1-slim-bullseye to 3.11.2-slim-bullseye in /docker ([#7196](https://github.com/dbt-labs/dbt-core/pull/7196))
- Bump black from 22.12.0 to 23.3.0 ([#7243](https://github.com/dbt-labs/dbt-core/pull/7243))
- Bump mashumaro[msgpack] from 3.3.1 to 3.6 ([#7294](https://github.com/dbt-labs/dbt-core/pull/7294))

### Dependency

- Bump mypy from 0.971 to 0.981 ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904))
- Bump python from 3.10.7-slim-bullseye to 3.11.1-slim-bullseye in /docker ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904))
- Bump black from 22.10.0 to 22.12.0 ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904))

### Contributors
- [@ttusing](https://github.com/ttusing) ([#7195](https://github.com/dbt-labs/dbt-core/issues/7195))
- [@Goodkat](https://github.com/Goodkat) ([#6992](https://github.com/dbt-labs/dbt-core/issues/6992))
- [@MartinGuindon](https://github.com/MartinGuindon) ([#358](https://github.com/dbt-labs/dbt-core/issues/358))
- [@MatthieuBlais](https://github.com/MatthieuBlais) ([#7191](https://github.com/dbt-labs/dbt-core/issues/7191))
- [@RobbertDM](https://github.com/RobbertDM) ([#6781](https://github.com/dbt-labs/dbt-core/issues/6781))
- [@aezomz](https://github.com/aezomz) ([#2964](https://github.com/dbt-labs/dbt-core/issues/2964))
- [@benallard](https://github.com/benallard) ([#7294](https://github.com/dbt-labs/dbt-core/pull/7294))
- [@boxysean](https://github.com/boxysean) ([#6697](https://github.com/dbt-labs/dbt-core/issues/6697))
- [@callum-mcdata](https://github.com/callum-mcdata) ([#6928](https://github.com/dbt-labs/dbt-core/issues/6928))
- [@chamini2](https://github.com/chamini2) ([#7278](https://github.com/dbt-labs/dbt-core/issues/7278))
- [@dave-connors-3](https://github.com/dave-connors-3) ([#7054](https://github.com/dbt-labs/dbt-core/issues/7054), [#7315](https://github.com/dbt-labs/dbt-core/issues/7315), [#6699](https://github.com/dbt-labs/dbt-core/issues/6699))
- [@davidbloss](https://github.com/davidbloss) ([#6153](https://github.com/dbt-labs/dbt-core/issues/6153))
- [@halvorlu](https://github.com/halvorlu) ([#366](https://github.com/dbt-labs/dbt-core/issues/366))
- [@jmg-duarte](https://github.com/jmg-duarte) ([#6102](https://github.com/dbt-labs/dbt-core/issues/6102))
- [@kentkr](https://github.com/kentkr) ([#7209](https://github.com/dbt-labs/dbt-core/issues/7209))
- [@leo-schick](https://github.com/leo-schick) ([#6078](https://github.com/dbt-labs/dbt-core/issues/6078))
- [@nielspardon](https://github.com/nielspardon) ([#6900](https://github.com/dbt-labs/dbt-core/issues/6900))
- [@rlh1994](https://github.com/rlh1994) ([#6876](https://github.com/dbt-labs/dbt-core/issues/6876), [#390](https://github.com/dbt-labs/dbt-core/issues/390))
- [@ryancharris](https://github.com/ryancharris) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
- [@sdebruyn](https://github.com/sdebruyn) ([#7077](https://github.com/dbt-labs/dbt-core/issues/7077))
- [@seub](https://github.com/seub) ([#6603](https://github.com/dbt-labs/dbt-core/issues/6603))
- [@sungchun12](https://github.com/sungchun12) ([#6079](https://github.com/dbt-labs/dbt-core/issues/6079))
- [@z3z1ma](https://github.com/z3z1ma) ([#6598](https://github.com/dbt-labs/dbt-core/issues/6598))

## Previous Releases

For information on prior major and minor releases, see their changelogs:
@@ -3,7 +3,7 @@
# See `/docker` for a generic and production-ready docker file
##

FROM ubuntu:22.04
FROM ubuntu:23.04

ENV DEBIAN_FRONTEND noninteractive
Makefile: 41 lines changed
@@ -6,29 +6,42 @@ ifeq ($(USE_DOCKER),true)
  DOCKER_CMD := docker-compose run --rm test
endif

LOGS_DIR := ./logs
#
# To override CI_FLAGS, create a file at this repo's root dir named `makefile.test.env`. Fill it
# with any ENV_VAR overrides required by your test environment, e.g.
# DBT_TEST_USER_1=user
# LOG_DIR="dir with a space in it"
#
# Warn: Restrict each line to one variable only.
#
ifeq (./makefile.test.env,$(wildcard ./makefile.test.env))
  include ./makefile.test.env
endif
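For instance, a local `makefile.test.env` overriding two of the defaults might contain (values illustrative):

```
DBT_TEST_USER_1=my_local_pg_user
LOG_DIR=./artifacts/logs
```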
# Optional flag to invoke tests using our CI env.
# But we always want these active for structured
# log testing.
CI_FLAGS =\
  DBT_TEST_USER_1=dbt_test_user_1\
  DBT_TEST_USER_2=dbt_test_user_2\
  DBT_TEST_USER_3=dbt_test_user_3\
  RUSTFLAGS="-D warnings"\
  LOG_DIR=./logs\
  DBT_LOG_FORMAT=json
  DBT_TEST_USER_1=$(if $(DBT_TEST_USER_1),$(DBT_TEST_USER_1),dbt_test_user_1)\
  DBT_TEST_USER_2=$(if $(DBT_TEST_USER_2),$(DBT_TEST_USER_2),dbt_test_user_2)\
  DBT_TEST_USER_3=$(if $(DBT_TEST_USER_3),$(DBT_TEST_USER_3),dbt_test_user_3)\
  RUSTFLAGS=$(if $(RUSTFLAGS),$(RUSTFLAGS),"-D warnings")\
  LOG_DIR=$(if $(LOG_DIR),$(LOG_DIR),./logs)\
  DBT_LOG_FORMAT=$(if $(DBT_LOG_FORMAT),$(DBT_LOG_FORMAT),json)
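Each `$(if $(VAR),$(VAR),default)` keeps a value supplied by the environment or `makefile.test.env` and falls back to the hardcoded default otherwise; in shell terms each line behaves roughly like bash default-value expansion:

```bash
# bash analogy for the Make $(if ...) fallback pattern above
LOG_DIR="${LOG_DIR:-./logs}"
DBT_LOG_FORMAT="${DBT_LOG_FORMAT:-json}"
```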
.PHONY: dev_req
dev_req: ## Installs dbt-* packages in develop mode along with only development dependencies.
	@\
	pip install -r dev-requirements.txt -r editable-requirements.txt
	pip install -r dev-requirements.txt
	pip install -r editable-requirements.txt

.PHONY: dev
dev: dev_req ## Installs dbt-* packages in develop mode along with development dependencies and pre-commit.
	@\
	pre-commit install

.PHONY: proto_types
proto_types: ## generates google protobuf python file from types.proto
	protoc -I=./core/dbt/events --python_out=./core/dbt/events ./core/dbt/events/types.proto

.PHONY: mypy
mypy: .env ## Runs mypy against staged changes for static type checking.
	@\
@@ -66,7 +79,7 @@ test: .env ## Runs unit tests with py and code checks against staged changes.
.PHONY: integration
integration: .env ## Runs postgres integration tests with py-integration
	@\
	$(if $(USE_CI_FLAGS), $(CI_FLAGS)) $(DOCKER_CMD) tox -e py-integration -- -nauto
	$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto

.PHONY: integration-fail-fast
integration-fail-fast: .env ## Runs postgres integration tests with py-integration in "fail fast" mode.
@@ -76,9 +89,9 @@ integration-fail-fast: .env ## Runs postgres integration tests with py-integrati
.PHONY: interop
interop: clean
	@\
	mkdir $(LOGS_DIR) && \
	mkdir $(LOG_DIR) && \
	$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto && \
	LOG_DIR=$(LOGS_DIR) cargo run --manifest-path test/interop/log_parsing/Cargo.toml
	LOG_DIR=$(LOG_DIR) cargo run --manifest-path test/interop/log_parsing/Cargo.toml

.PHONY: setup-db
setup-db: ## Setup Postgres database with docker-compose for system testing.
@@ -21,7 +21,7 @@ These select statements, or "models", form a dbt project. Models frequently buil

## Getting started

- [Install dbt](https://docs.getdbt.com/docs/installation)
- [Install dbt](https://docs.getdbt.com/docs/get-started/installation)
- Read the [introduction](https://docs.getdbt.com/docs/introduction/) and [viewpoint](https://docs.getdbt.com/docs/about/viewpoint/)

## Join the dbt Community
@@ -1,14 +1,19 @@
# these are all just exports, #noqa them so flake8 will be happy

# TODO: Should we still include this in the `adapters` namespace?
from dbt.contracts.connection import Credentials # noqa
from dbt.adapters.base.meta import available # noqa
from dbt.adapters.base.connections import BaseConnectionManager # noqa
from dbt.adapters.base.relation import ( # noqa
from dbt.contracts.connection import Credentials # noqa: F401
from dbt.adapters.base.meta import available # noqa: F401
from dbt.adapters.base.connections import BaseConnectionManager # noqa: F401
from dbt.adapters.base.relation import ( # noqa: F401
BaseRelation,
RelationType,
SchemaSearchMap,
)
from dbt.adapters.base.column import Column # noqa
from dbt.adapters.base.impl import AdapterConfig, BaseAdapter, PythonJobHelper # noqa
from dbt.adapters.base.plugin import AdapterPlugin # noqa
from dbt.adapters.base.column import Column # noqa: F401
from dbt.adapters.base.impl import ( # noqa: F401
AdapterConfig,
BaseAdapter,
PythonJobHelper,
ConstraintSupport,
)
from dbt.adapters.base.plugin import AdapterPlugin # noqa: F401
@@ -60,6 +60,7 @@ class Column:
"float",
"double precision",
"float8",
"double",
]

def is_integer(self) -> bool:
@@ -2,46 +2,48 @@ import abc
from concurrent.futures import as_completed, Future
from contextlib import contextmanager
from datetime import datetime
from enum import Enum
import time
from itertools import chain
from typing import (
Optional,
Tuple,
Callable,
Iterable,
Type,
Dict,
Any,
Callable,
Dict,
Iterable,
Iterator,
List,
Mapping,
Iterator,
Optional,
Set,
Tuple,
Type,
Union,
)

from dbt.contracts.graph.nodes import ColumnLevelConstraint, ConstraintType, ModelLevelConstraint

import agate
import pytz

from dbt.exceptions import (
DbtInternalError,
DbtRuntimeError,
DbtValidationError,
MacroArgTypeError,
MacroResultError,
QuoteConfigTypeError,
NotImplementedError,
NullRelationCacheAttemptedError,
NullRelationDropAttemptedError,
QuoteConfigTypeError,
RelationReturnedMultipleResultsError,
RenameToNoneAttemptedError,
DbtRuntimeError,
SnapshotTargetIncompleteError,
SnapshotTargetNotSnapshotTableError,
UnexpectedNullError,
UnexpectedNonTimestampError,
UnexpectedNullError,
)

from dbt.adapters.protocol import (
AdapterConfig,
ConnectionManagerProtocol,
)
from dbt.adapters.protocol import AdapterConfig, ConnectionManagerProtocol
from dbt.clients.agate_helper import empty_table, merge_tables, table_from_rows
from dbt.clients.jinja import MacroGenerator
from dbt.contracts.graph.manifest import Manifest, MacroManifest

@@ -53,8 +55,10 @@ from dbt.events.types import (
CodeExecution,
CodeExecutionStatus,
CatalogGenerationError,
ConstraintNotSupported,
ConstraintNotEnforced,
)
from dbt.utils import filter_null_values, executor, cast_to_str
from dbt.utils import filter_null_values, executor, cast_to_str, AttrDict

from dbt.adapters.base.connections import Connection, AdapterResponse
from dbt.adapters.base.meta import AdapterMeta, available

@@ -66,13 +70,19 @@ from dbt.adapters.base.relation import (
)
from dbt.adapters.base import Column as BaseColumn
from dbt.adapters.base import Credentials
from dbt.adapters.cache import RelationsCache, _make_ref_key_msg

from dbt.adapters.cache import RelationsCache, _make_ref_key_dict
from dbt import deprecations

GET_CATALOG_MACRO_NAME = "get_catalog"
FRESHNESS_MACRO_NAME = "collect_freshness"


class ConstraintSupport(str, Enum):
ENFORCED = "enforced"
NOT_ENFORCED = "not_enforced"
NOT_SUPPORTED = "not_supported"


def _expect_row_value(key: str, row: agate.Row):
if key not in row.keys():
raise DbtInternalError(

@@ -177,6 +187,7 @@ class BaseAdapter(metaclass=AdapterMeta):
- truncate_relation
- rename_relation
- get_columns_in_relation
- get_column_schema_from_query
- expand_column_types
- list_relations_without_caching
- is_cancelable

@@ -203,6 +214,14 @@ class BaseAdapter(metaclass=AdapterMeta):
# for use in materializations
AdapterSpecificConfigs: Type[AdapterConfig] = AdapterConfig

CONSTRAINT_SUPPORT = {
ConstraintType.check: ConstraintSupport.NOT_SUPPORTED,
ConstraintType.not_null: ConstraintSupport.ENFORCED,
ConstraintType.unique: ConstraintSupport.NOT_ENFORCED,
ConstraintType.primary_key: ConstraintSupport.NOT_ENFORCED,
ConstraintType.foreign_key: ConstraintSupport.ENFORCED,
}

def __init__(self, config):
self.config = config
self.cache = RelationsCache()

@@ -269,6 +288,19 @@ class BaseAdapter(metaclass=AdapterMeta):
"""
return self.connections.execute(sql=sql, auto_begin=auto_begin, fetch=fetch)

@available.parse(lambda *a, **k: [])
def get_column_schema_from_query(self, sql: str) -> List[BaseColumn]:
"""Get a list of the Columns with names and data types from the given sql."""
_, cursor = self.connections.add_select_query(sql)
columns = [
self.Column.create(
column_name, self.connections.data_type_code_to_name(column_type_code)
)
# https://peps.python.org/pep-0249/#description
for column_name, column_type_code, *_ in cursor.description
]
return columns

@available.parse(lambda *a, **k: ("", empty_table()))
def get_partitions_metadata(self, table: str) -> Tuple[agate.Table]:
"""Obtain partitions metadata for a BigQuery partitioned table.

@@ -704,11 +736,23 @@ class BaseAdapter(metaclass=AdapterMeta):
# we can't build the relations cache because we don't have a
# manifest so we can't run any operations.
relations = self.list_relations_without_caching(schema_relation)

# if the cache is already populated, add this schema in
# otherwise, skip updating the cache and just ignore
if self.cache:
for relation in relations:
self.cache.add(relation)
if not relations:
# it's possible that there were no relations in some schemas. We want
# to insert the schemas we query into the cache's `.schemas` attribute
# so we can check it later
self.cache.update_schemas([(database, schema)])

fire_event(
ListRelations(
database=cast_to_str(database),
schema=schema,
relations=[_make_ref_key_msg(x) for x in relations],
relations=[_make_ref_key_dict(x) for x in relations],
)
)
@@ -943,7 +987,7 @@
context_override: Optional[Dict[str, Any]] = None,
kwargs: Dict[str, Any] = None,
text_only_columns: Optional[Iterable[str]] = None,
) -> agate.Table:
) -> AttrDict:
"""Look macro_name up in the manifest and execute its results.

:param macro_name: The name of the macro to execute.

@@ -1028,7 +1072,7 @@
manifest=manifest,
)

results = self._catalog_filter_table(table, manifest)
results = self._catalog_filter_table(table, manifest)  # type: ignore[arg-type]
return results

def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]:

@@ -1060,7 +1104,7 @@
loaded_at_field: str,
filter: Optional[str],
manifest: Optional[Manifest] = None,
) -> Dict[str, Any]:
) -> Tuple[Optional[AdapterResponse], Dict[str, Any]]:
"""Calculate the freshness of sources in dbt, and return it"""
kwargs: Dict[str, Any] = {
"source": source,

@@ -1069,7 +1113,19 @@
}

# run the macro
table = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest)
# in older versions of dbt-core, the 'collect_freshness' macro returned the table of results directly
# starting in v1.5, by default, we return both the table and the adapter response (metadata about the query)
result: Union[
AttrDict,  # current: contains AdapterResponse + agate.Table
agate.Table,  # previous: just table
]
result = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest)
if isinstance(result, agate.Table):
deprecations.warn("collect-freshness-return-signature")
adapter_response = None
table = result
else:
adapter_response, table = result.response, result.table  # type: ignore[attr-defined]
# now we have a 1-row table of the maximum `loaded_at_field` value and
# the current time according to the db.
if len(table) != 1 or len(table[0]) != 2:

@@ -1083,11 +1139,12 @@

snapshotted_at = _utc(table[0][1], source, loaded_at_field)
age = (snapshotted_at - max_loaded_at).total_seconds()
return {
freshness = {
"max_loaded_at": max_loaded_at,
"snapshotted_at": snapshotted_at,
"age": age,
}
return adapter_response, freshness

def pre_model_hook(self, config: Mapping[str, Any]) -> Any:
"""A hook for running some operation before the model materialization
@@ -1249,6 +1306,119 @@
# This returns a callable macro
return model_context[macro_name]

@classmethod
def _parse_column_constraint(cls, raw_constraint: Dict[str, Any]) -> ColumnLevelConstraint:
try:
ColumnLevelConstraint.validate(raw_constraint)
return ColumnLevelConstraint.from_dict(raw_constraint)
except Exception:
raise DbtValidationError(f"Could not parse constraint: {raw_constraint}")

@classmethod
def render_column_constraint(cls, constraint: ColumnLevelConstraint) -> Optional[str]:
"""Render the given constraint as DDL text. Should be overridden by adapters which need custom constraint
rendering."""
constraint_expression = constraint.expression or ""

rendered_column_constraint = None
if constraint.type == ConstraintType.check and constraint_expression:
rendered_column_constraint = f"check ({constraint_expression})"
elif constraint.type == ConstraintType.not_null:
rendered_column_constraint = f"not null {constraint_expression}"
elif constraint.type == ConstraintType.unique:
rendered_column_constraint = f"unique {constraint_expression}"
elif constraint.type == ConstraintType.primary_key:
rendered_column_constraint = f"primary key {constraint_expression}"
elif constraint.type == ConstraintType.foreign_key and constraint_expression:
rendered_column_constraint = f"references {constraint_expression}"
elif constraint.type == ConstraintType.custom and constraint_expression:
rendered_column_constraint = constraint_expression

if rendered_column_constraint:
rendered_column_constraint = rendered_column_constraint.strip()

return rendered_column_constraint

@available
@classmethod
def render_raw_columns_constraints(cls, raw_columns: Dict[str, Dict[str, Any]]) -> List:
rendered_column_constraints = []

for v in raw_columns.values():
col_name = cls.quote(v["name"]) if v.get("quote") else v["name"]
rendered_column_constraint = [f"{col_name} {v['data_type']}"]
for con in v.get("constraints", None):
constraint = cls._parse_column_constraint(con)
c = cls.process_parsed_constraint(constraint, cls.render_column_constraint)
if c is not None:
rendered_column_constraint.append(c)
rendered_column_constraints.append(" ".join(rendered_column_constraint))

return rendered_column_constraints

@classmethod
def process_parsed_constraint(
cls, parsed_constraint: Union[ColumnLevelConstraint, ModelLevelConstraint], render_func
) -> Optional[str]:
if (
parsed_constraint.warn_unsupported
and cls.CONSTRAINT_SUPPORT[parsed_constraint.type] == ConstraintSupport.NOT_SUPPORTED
):
warn_or_error(
ConstraintNotSupported(constraint=parsed_constraint.type.value, adapter=cls.type())
)
if (
parsed_constraint.warn_unenforced
and cls.CONSTRAINT_SUPPORT[parsed_constraint.type] == ConstraintSupport.NOT_ENFORCED
):
warn_or_error(
ConstraintNotEnforced(constraint=parsed_constraint.type.value, adapter=cls.type())
)
if cls.CONSTRAINT_SUPPORT[parsed_constraint.type] != ConstraintSupport.NOT_SUPPORTED:
return render_func(parsed_constraint)

return None

@classmethod
def _parse_model_constraint(cls, raw_constraint: Dict[str, Any]) -> ModelLevelConstraint:
try:
ModelLevelConstraint.validate(raw_constraint)
c = ModelLevelConstraint.from_dict(raw_constraint)
return c
except Exception:
raise DbtValidationError(f"Could not parse constraint: {raw_constraint}")

@available
@classmethod
def render_raw_model_constraints(cls, raw_constraints: List[Dict[str, Any]]) -> List[str]:
return [c for c in map(cls.render_raw_model_constraint, raw_constraints) if c is not None]

@classmethod
def render_raw_model_constraint(cls, raw_constraint: Dict[str, Any]) -> Optional[str]:
constraint = cls._parse_model_constraint(raw_constraint)
return cls.process_parsed_constraint(constraint, cls.render_model_constraint)

@classmethod
def render_model_constraint(cls, constraint: ModelLevelConstraint) -> Optional[str]:
"""Render the given constraint as DDL text. Should be overridden by adapters which need custom constraint
rendering."""
constraint_prefix = f"constraint {constraint.name} " if constraint.name else ""
column_list = ", ".join(constraint.columns)
if constraint.type == ConstraintType.check and constraint.expression:
return f"{constraint_prefix}check ({constraint.expression})"
elif constraint.type == ConstraintType.unique:
constraint_expression = f" {constraint.expression}" if constraint.expression else ""
return f"{constraint_prefix}unique{constraint_expression} ({column_list})"
elif constraint.type == ConstraintType.primary_key:
constraint_expression = f" {constraint.expression}" if constraint.expression else ""
return f"{constraint_prefix}primary key{constraint_expression} ({column_list})"
elif constraint.type == ConstraintType.foreign_key and constraint.expression:
return f"{constraint_prefix}foreign key ({column_list}) references {constraint.expression}"
elif constraint.type == ConstraintType.custom and constraint.expression:
return f"{constraint_prefix}{constraint.expression}"
else:
return None


COLUMNS_EQUAL_SQL = """
with diff_count as (
@@ -7,9 +7,9 @@ from dbt.adapters.protocol import AdapterProtocol

def project_name_from_path(include_path: str) -> str:
# avoid an import cycle
from dbt.config.project import Project
from dbt.config.project import PartialProject

partial = Project.partial_load(include_path)
partial = PartialProject.from_project_root(include_path)
if partial.project_name is None:
raise CompilationError(f"Invalid project at {include_path}: name not set!")
return partial.project_name
@@ -4,8 +4,7 @@ from typing import Any, Dict, Iterable, List, Optional, Set, Tuple

from dbt.adapters.reference_keys import (
_make_ref_key,
_make_ref_key_msg,
_make_msg_from_ref_key,
_make_ref_key_dict,
_ReferenceKey,
)
from dbt.exceptions import (

@@ -17,7 +16,7 @@ from dbt.exceptions import (
)
from dbt.events.functions import fire_event, fire_event_if
from dbt.events.types import CacheAction, CacheDumpGraph
import dbt.flags as flags
from dbt.flags import get_flags
from dbt.utils import lowercase

@@ -230,7 +229,7 @@ class RelationsCache:
# self.relations or any cache entry's referenced_by during iteration
# it's a runtime error!
with self.lock:
return {dot_separated(k): v.dump_graph_entry() for k, v in self.relations.items()}
return {dot_separated(k): str(v.dump_graph_entry()) for k, v in self.relations.items()}

def _setdefault(self, relation: _CachedRelation):
"""Add a relation to the cache, or return it if it already exists.
@@ -290,8 +289,8 @@
# a link - we will never drop the referenced relation during a run.
fire_event(
CacheAction(
ref_key=_make_msg_from_ref_key(ref_key),
ref_key_2=_make_msg_from_ref_key(dep_key),
ref_key=ref_key._asdict(),
ref_key_2=dep_key._asdict(),
)
)
return

@@ -306,8 +305,8 @@
fire_event(
CacheAction(
action="add_link",
ref_key=_make_msg_from_ref_key(dep_key),
ref_key_2=_make_msg_from_ref_key(ref_key),
ref_key=dep_key._asdict(),
ref_key_2=ref_key._asdict(),
)
)
with self.lock:

@@ -319,12 +318,13 @@

:param BaseRelation relation: The underlying relation.
"""
flags = get_flags()
cached = _CachedRelation(relation)
fire_event_if(
flags.LOG_CACHE_EVENTS,
lambda: CacheDumpGraph(before_after="before", action="adding", dump=self.dump_graph()),
)
fire_event(CacheAction(action="add_relation", ref_key=_make_ref_key_msg(cached)))
fire_event(CacheAction(action="add_relation", ref_key=_make_ref_key_dict(cached)))

with self.lock:
self._setdefault(cached)

@@ -358,7 +358,7 @@
:param str identifier: The identifier of the relation to drop.
"""
dropped_key = _make_ref_key(relation)
dropped_key_msg = _make_ref_key_msg(relation)
dropped_key_msg = _make_ref_key_dict(relation)
fire_event(CacheAction(action="drop_relation", ref_key=dropped_key_msg))
with self.lock:
if dropped_key not in self.relations:

@@ -366,7 +366,7 @@
return
consequences = self.relations[dropped_key].collect_consequences()
# convert from a list of _ReferenceKeys to a list of ReferenceKeyMsgs
consequence_msgs = [_make_msg_from_ref_key(key) for key in consequences]
consequence_msgs = [key._asdict() for key in consequences]
fire_event(
CacheAction(
action="drop_cascade", ref_key=dropped_key_msg, ref_list=consequence_msgs

@@ -396,9 +396,9 @@
fire_event(
CacheAction(
action="update_reference",
ref_key=_make_ref_key_msg(old_key),
ref_key_2=_make_ref_key_msg(new_key),
ref_key_3=_make_ref_key_msg(cached.key()),
ref_key=_make_ref_key_dict(old_key),
ref_key_2=_make_ref_key_dict(new_key),
ref_key_3=_make_ref_key_dict(cached.key()),
)
)

@@ -429,9 +429,7 @@
raise TruncatedModelNameCausedCollisionError(new_key, self.relations)

if old_key not in self.relations:
fire_event(
CacheAction(action="temporary_relation", ref_key=_make_msg_from_ref_key(old_key))
)
fire_event(CacheAction(action="temporary_relation", ref_key=old_key._asdict()))
return False
return True

@@ -452,11 +450,11 @@
fire_event(
CacheAction(
action="rename_relation",
ref_key=_make_msg_from_ref_key(old_key),
ref_key_2=_make_msg_from_ref_key(new),
ref_key=old_key._asdict(),
ref_key_2=new_key._asdict(),
)
)

flags = get_flags()
fire_event_if(
flags.LOG_CACHE_EVENTS,
lambda: CacheDumpGraph(before_after="before", action="rename", dump=self.dump_graph()),
@@ -9,10 +9,11 @@ from dbt.adapters.base.plugin import AdapterPlugin
from dbt.adapters.protocol import AdapterConfig, AdapterProtocol, RelationProtocol
from dbt.contracts.connection import AdapterRequiredConfig, Credentials
from dbt.events.functions import fire_event
from dbt.events.types import AdapterImportError, PluginLoadError
from dbt.events.types import AdapterImportError, PluginLoadError, AdapterRegistered
from dbt.exceptions import DbtInternalError, DbtRuntimeError
from dbt.include.global_project import PACKAGE_PATH as GLOBAL_PROJECT_PATH
from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME
from dbt.semver import VersionSpecifier

Adapter = AdapterProtocol

@@ -89,7 +90,13 @@ class AdapterContainer:
def register_adapter(self, config: AdapterRequiredConfig) -> None:
adapter_name = config.credentials.type
adapter_type = self.get_adapter_class_by_name(adapter_name)

adapter_version = import_module(f".{adapter_name}.__version__", "dbt.adapters").version
adapter_version_specifier = VersionSpecifier.from_version_string(
adapter_version
).to_version_string()
fire_event(
AdapterRegistered(adapter_name=adapter_name, adapter_version=adapter_version_specifier)
)
with self.lock:
if adapter_name in self.adapters:
# this shouldn't really happen...
@@ -2,7 +2,6 @@

from collections import namedtuple
from typing import Any, Optional
from dbt.events.proto_types import ReferenceKeyMsg


_ReferenceKey = namedtuple("_ReferenceKey", "database schema identifier")

@@ -30,11 +29,9 @@ def _make_ref_key(relation: Any) -> _ReferenceKey:
)


def _make_ref_key_msg(relation: Any):
return _make_msg_from_ref_key(_make_ref_key(relation))


def _make_msg_from_ref_key(ref_key: _ReferenceKey) -> ReferenceKeyMsg:
return ReferenceKeyMsg(
database=ref_key.database, schema=ref_key.schema, identifier=ref_key.identifier
)
def _make_ref_key_dict(relation: Any):
return {
"database": relation.database,
"schema": relation.schema,
"identifier": relation.identifier,
}
@@ -1,6 +1,6 @@
import abc
import time
from typing import List, Optional, Tuple, Any, Iterable, Dict
from typing import List, Optional, Tuple, Any, Iterable, Dict, Union

import agate

@@ -52,6 +52,7 @@ class SQLConnectionManager(BaseConnectionManager):
bindings: Optional[Any] = None,
abridge_sql_log: bool = False,
) -> Tuple[Connection, Any]:

connection = self.get_thread_connection()
if auto_begin and connection.transaction_open is False:
self.begin()

@@ -128,6 +129,14 @@ class SQLConnectionManager(BaseConnectionManager):

return dbt.clients.agate_helper.table_from_data_flat(data, column_names)

@classmethod
def data_type_code_to_name(cls, type_code: Union[int, str]) -> str:
"""Get the string representation of the data type from the type_code."""
# https://peps.python.org/pep-0249/#type-objects
raise dbt.exceptions.NotImplementedError(
"`data_type_code_to_name` is not implemented for this adapter!"
)

def execute(
self, sql: str, auto_begin: bool = False, fetch: bool = False
) -> Tuple[AdapterResponse, agate.Table]:
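The `data_type_code_to_name` hook added above maps DB-API 2.0 `cursor.description` type codes to type names. A hedged sketch of how an adapter subclass might override it (the mapping values are illustrative, Postgres-style OIDs; real adapters consult their driver's documentation):

```python
from typing import Union

from dbt.adapters.sql import SQLConnectionManager


class ExampleConnectionManager(SQLConnectionManager):
    # illustrative mapping from driver type codes (Postgres OIDs here)
    # to human-readable type names; not part of dbt-core itself
    _TYPE_CODE_NAMES = {23: "integer", 25: "text", 701: "float8"}

    @classmethod
    def data_type_code_to_name(cls, type_code: Union[int, str]) -> str:
        # fall back to the raw code when the driver reports something unmapped
        return cls._TYPE_CODE_NAMES.get(type_code, str(type_code))
```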
@@ -146,6 +155,10 @@ class SQLConnectionManager(BaseConnectionManager):
def add_commit_query(self):
return self.add_query("COMMIT", auto_begin=False)

def add_select_query(self, sql: str) -> Tuple[Connection, Any]:
sql = self._add_query_comment(sql)
return self.add_query(sql, auto_begin=False)

def begin(self):
connection = self.get_thread_connection()
if connection.transaction_open is true:
@@ -4,7 +4,7 @@ from typing import Any, Optional, Tuple, Type, List
from dbt.contracts.connection import Connection
from dbt.exceptions import RelationTypeNullError
from dbt.adapters.base import BaseAdapter, available
from dbt.adapters.cache import _make_ref_key_msg
from dbt.adapters.cache import _make_ref_key_dict
from dbt.adapters.sql import SQLConnectionManager
from dbt.events.functions import fire_event
from dbt.events.types import ColTypeChange, SchemaCreation, SchemaDrop

@@ -109,7 +109,7 @@ class SQLAdapter(BaseAdapter):
ColTypeChange(
orig_type=target_column.data_type,
new_type=new_type,
table=_make_ref_key_msg(current),
table=_make_ref_key_dict(current),
)
)

@@ -152,7 +152,7 @@ class SQLAdapter(BaseAdapter):

def create_schema(self, relation: BaseRelation) -> None:
relation = relation.without_identifier()
fire_event(SchemaCreation(relation=_make_ref_key_msg(relation)))
fire_event(SchemaCreation(relation=_make_ref_key_dict(relation)))
kwargs = {
"relation": relation,
}

@@ -163,7 +163,7 @@ class SQLAdapter(BaseAdapter):

def drop_schema(self, relation: BaseRelation) -> None:
relation = relation.without_identifier()
fire_event(SchemaDrop(relation=_make_ref_key_msg(relation)))
fire_event(SchemaDrop(relation=_make_ref_key_dict(relation)))
kwargs = {
"relation": relation,
}

@@ -197,6 +197,7 @@ class SQLAdapter(BaseAdapter):
)
return relations

@classmethod
def quote(self, identifier):
return '"{}"'.format(identifier)
@@ -1 +1,49 @@
TODO
# Exception Handling

## `requires.py`

### `postflight`
In the postflight decorator, the click command is invoked (i.e. `func(*args, **kwargs)`) and wrapped in a `try/except` block to handle any exceptions thrown.
Any exceptions thrown from `postflight` are wrapped by custom exceptions from the `dbt.cli.exceptions` module (i.e. `ResultExit`, `ExceptionExit`) to instruct click to complete execution with a particular exit code.

Some `dbt-core` handled exceptions have an attribute named `results` which contains results from running nodes (e.g. `FailFastError`). These are wrapped in the `ResultExit` exception to represent runs that have failed in a way that `dbt-core` expects.
If the invocation of the command does not throw any exceptions but does not succeed, `postflight` will still raise the `ResultExit` exception to make use of the exit code.
These exceptions produce an exit code of `1`.

Exceptions wrapped with `ExceptionExit` may be thrown by `dbt-core` intentionally (i.e. an exception that inherits from `dbt.exceptions.Exception`) or unintentionally (i.e. exceptions thrown by the python runtime). In either case these are considered errors that `dbt-core` did not expect and are treated as genuine exceptions.
These exceptions produce an exit code of `2`.

If no exceptions are thrown from invoking the command and the command succeeds, `postflight` will not raise any exceptions.
When no exceptions are raised an exit code of `0` is produced.
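To make the exit-code mapping above concrete, here is a minimal sketch of the wrapping pattern; this is an illustration of the behavior described, not the actual decorator (`ResultExit` and `ExceptionExit` come from `dbt.cli.exceptions`):

```python
import functools

from dbt.cli.exceptions import ExceptionExit, ResultExit


def postflight_sketch(func):
    """Hypothetical simplification of `postflight` for illustration only."""

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            result, success = func(*args, **kwargs)
        except Exception as e:
            # dbt-handled errors that carry node results (e.g. FailFastError)
            # are wrapped in ResultExit -> exit code 1
            if hasattr(e, "results"):
                raise ResultExit(e.results) from e
            # anything else is unexpected -> ExceptionExit -> exit code 2
            raise ExceptionExit(e) from e
        if not success:
            # the command ran without raising but did not succeed -> exit code 1
            raise ResultExit(result)
        return result, success  # no exception raised -> exit code 0

    return wrapper
```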
## `main.py`

### `dbtRunner`
`dbtRunner` provides a programmatic interface for our click CLI and wraps the invocation of the click commands to handle any exceptions thrown.

`dbtRunner.invoke` should ideally only ever return an instantiated `dbtRunnerResult`, which contains the following fields:
- `success`: A boolean representing whether the command invocation was successful
- `result`: The optional result of the command invoked. This attribute can have many types; please see the definition of `dbtRunnerResult` for more information
- `exception`: If an exception was thrown during command invocation it will be saved here; otherwise it will be `None`. Please note that the exceptions held in this attribute are not the exceptions thrown by `preflight` but instead the exceptions that `ResultExit` and `ExceptionExit` wrap

Programmatic exception handling might look like the following:
```python
res = dbtRunner().invoke(["run"])
if not res.success:
    ...
    if type(res.exception) == SomeExceptionType:
        ...
```
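Building on that, a fuller usage sketch: the `callbacks` parameter is taken from the `dbtRunner` constructor shown in the `main.py` diff below, while the `event.info.name` access and the per-node result fields are assumptions about the event and result shapes:

```python
from dbt.cli.main import dbtRunner, dbtRunnerResult


def log_event(event) -> None:
    # assumption: EventMsg exposes a nested `info.name` field
    print(event.info.name)


runner = dbtRunner(callbacks=[log_event])
res: dbtRunnerResult = runner.invoke(["run", "--select", "my_model"])

if res.success:
    # for `run`, `result` is a RunExecutionResult with per-node results
    for node_result in res.result.results:
        print(node_result.node.name, node_result.status)
elif res.exception is not None:
    raise res.exception
```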
## `dbt/tests/util.py`

### `run_dbt`
In many of our functional and integration tests, we want to be sure that an invocation of `dbt` raises a certain exception.
A common pattern for these assertions:
```python
class TestSomething:
    def test_something(self, project):
        with pytest.raises(SomeException):
            run_dbt(["run"])
```
To allow these tests to assert that exceptions have been thrown, the `run_dbt` function will raise any exceptions it receives from the invocation of a `dbt` command.
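A hedged companion sketch, assuming the `expect_pass` keyword that `run_dbt` accepts: failures that produce results (rather than exceptions) can be asserted without `pytest.raises`:

```python
from dbt.tests.util import run_dbt


class TestExpectedFailure:
    def test_failing_model(self, project):
        # expect_pass=False asserts the run fails without raising,
        # complementing the pytest.raises pattern shown above
        results = run_dbt(["run"], expect_pass=False)
        assert len(results) == 1
```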
@@ -0,0 +1 @@
from .main import cli as dbt_cli # noqa
core/dbt/cli/context.py (new file, 16 lines)
@@ -0,0 +1,16 @@
import click
from typing import Optional

from dbt.cli.main import cli as dbt


def make_context(args, command=dbt) -> Optional[click.Context]:
    try:
        ctx = command.make_context(command.name, args)
    except click.exceptions.Exit:
        return None

    ctx.invoked_subcommand = ctx.protected_args[0] if ctx.protected_args else None
    ctx.obj = {}

    return ctx
core/dbt/cli/exceptions.py (new file, 43 lines)
@@ -0,0 +1,43 @@
from typing import Optional, IO

from click.exceptions import ClickException
from dbt.utils import ExitCodes


class DbtUsageException(Exception):
    pass


class DbtInternalException(Exception):
    pass


class CliException(ClickException):
    """The base exception class for our implementation of the click CLI.
    The exit_code attribute is used by click to determine which exit code to produce
    after an invocation."""

    def __init__(self, exit_code: ExitCodes) -> None:
        self.exit_code = exit_code.value

    # the typing of _file is to satisfy the signature of ClickException.show
    # overriding this method prevents click from printing any exceptions to stdout
    def show(self, _file: Optional[IO] = None) -> None:
        pass


class ResultExit(CliException):
    """This class wraps any exception that contains results while invoking dbt, or the
    results of an invocation that did not succeed but did not throw any exceptions."""

    def __init__(self, result) -> None:
        super().__init__(ExitCodes.ModelError)
        self.result = result


class ExceptionExit(CliException):
    """This class wraps any exception that does not contain results thrown while invoking dbt."""

    def __init__(self, exception: Exception) -> None:
        super().__init__(ExitCodes.UnhandledError)
        self.exception = exception
@@ -1,44 +1,279 @@
# TODO Move this to /core/dbt/flags.py when we're ready to break things
import os
import sys
from dataclasses import dataclass
from importlib import import_module
from multiprocessing import get_context
from pprint import pformat as pf
from typing import Callable, Dict, List, Set, Union

from click import get_current_context
from click import Context, get_current_context
from click.core import Command, Group, ParameterSource
from dbt.cli.exceptions import DbtUsageException
from dbt.cli.resolvers import default_log_path, default_project_dir
from dbt.config.profile import read_user_config
from dbt.contracts.project import UserConfig
from dbt.deprecations import renamed_env_var
from dbt.helper_types import WarnErrorOptions

if os.name != "nt":
# https://bugs.python.org/issue41567
import multiprocessing.popen_spawn_posix # type: ignore # noqa: F401

FLAGS_DEFAULTS = {
"INDIRECT_SELECTION": "eager",
"TARGET_PATH": None,
# Cli args without user_config or env var option.
"FULL_REFRESH": False,
"STRICT_MODE": False,
"STORE_FAILURES": False,
"INTROSPECT": True,
}

DEPRECATED_PARAMS = {
"deprecated_defer": "defer",
"deprecated_favor_state": "favor_state",
"deprecated_print": "print",
"deprecated_state": "state",
}


def convert_config(config_name, config_value):
"""Convert the values from config and original set_from_args to the correct type."""
ret = config_value
if config_name.lower() == "warn_error_options" and type(config_value) == dict:
ret = WarnErrorOptions(
include=config_value.get("include", []), exclude=config_value.get("exclude", [])
)
return ret


def args_to_context(args: List[str]) -> Context:
"""Convert a list of args to a click context with proper hierarchy for dbt commands"""
from dbt.cli.main import cli

cli_ctx = cli.make_context(cli.name, args)
# Split args if they're a comma separated string.
if len(args) == 1 and "," in args[0]:
args = args[0].split(",")
sub_command_name, sub_command, args = cli.resolve_command(cli_ctx, args)

# Handle source and docs group.
if type(sub_command) == Group:
sub_command_name, sub_command, args = sub_command.resolve_command(cli_ctx, args)

assert type(sub_command) == Command
sub_command_ctx = sub_command.make_context(sub_command_name, args)
sub_command_ctx.parent = cli_ctx
return sub_command_ctx


@dataclass(frozen=True)
class Flags:
def __init__(self, ctx=None) -> None:
"""Primary configuration artifact for running dbt"""

def __init__(self, ctx: Context = None, user_config: UserConfig = None) -> None:

# Set the default flags.
for key, value in FLAGS_DEFAULTS.items():
object.__setattr__(self, key, value)

if ctx is None:
ctx = get_current_context()

def assign_params(ctx):
def _get_params_by_source(ctx: Context, source_type: ParameterSource):
"""Generates all params of a given source type."""
yield from [
name for name, source in ctx._parameter_source.items() if source is source_type
]
if ctx.parent:
yield from _get_params_by_source(ctx.parent, source_type)

# Ensure that any params sourced from the commandline are not present more than once.
# Click handles this exclusivity, but only at a per-subcommand level.
seen_params = []
for param in _get_params_by_source(ctx, ParameterSource.COMMANDLINE):
if param in seen_params:
raise DbtUsageException(
f"{param.lower()} was provided both before and after the subcommand, it can only be set either before or after.",
)
seen_params.append(param)

def _assign_params(
ctx: Context,
params_assigned_from_default: set,
deprecated_env_vars: Dict[str, Callable],
):
"""Recursively adds all click params to flag object"""
for param_name, param_value in ctx.params.items():
# N.B. You have to use the base MRO method (object.__setattr__) to set attributes
# when using frozen dataclasses.
# https://docs.python.org/3/library/dataclasses.html#frozen-instances
if hasattr(self, param_name):
raise Exception(f"Duplicate flag names found in click command: {param_name}")
object.__setattr__(self, param_name.upper(), param_value)

# Handle deprecated env vars while still respecting old values
# e.g. DBT_NO_PRINT -> DBT_PRINT if DBT_NO_PRINT is set, it is
# respected over DBT_PRINT or --print.
new_name: Union[str, None] = None
if param_name in DEPRECATED_PARAMS:

# Deprecated env vars can only be set via env var.
# We use the deprecated option in click to serialize the value
# from the env var string.
param_source = ctx.get_parameter_source(param_name)
if param_source == ParameterSource.DEFAULT:
continue
elif param_source != ParameterSource.ENVIRONMENT:
raise DbtUsageException(
"Deprecated parameters can only be set via environment variables",
)

# Rename for clarity.
dep_name = param_name
new_name = DEPRECATED_PARAMS.get(dep_name)
try:
assert isinstance(new_name, str)
except AssertionError:
raise Exception(
f"No deprecated param name match in DEPRECATED_PARAMS from {dep_name} to {new_name}"
)

# Find param objects for their envvar name.
try:
dep_param = [x for x in ctx.command.params if x.name == dep_name][0]
new_param = [x for x in ctx.command.params if x.name == new_name][0]
except IndexError:
raise Exception(
f"No deprecated param name match in context from {dep_name} to {new_name}"
)

# Remove param from defaulted set since the deprecated
# value is not set from default, but from an env var.
if new_name in params_assigned_from_default:
params_assigned_from_default.remove(new_name)

# Add the deprecation warning function to the set.
assert isinstance(dep_param.envvar, str)
assert isinstance(new_param.envvar, str)
deprecated_env_vars[new_name] = renamed_env_var(
old_name=dep_param.envvar,
new_name=new_param.envvar,
)

# Set the flag value.
is_duplicate = hasattr(self, param_name.upper())
is_default = ctx.get_parameter_source(param_name) == ParameterSource.DEFAULT
flag_name = (new_name or param_name).upper()

if (is_duplicate and not is_default) or not is_duplicate:
object.__setattr__(self, flag_name, param_value)

# Track default assigned params.
if is_default:
params_assigned_from_default.add(param_name)

if ctx.parent:
assign_params(ctx.parent)
_assign_params(ctx.parent, params_assigned_from_default, deprecated_env_vars)

assign_params(ctx)
params_assigned_from_default = set() # type: Set[str]
deprecated_env_vars: Dict[str, Callable] = {}
_assign_params(ctx, params_assigned_from_default, deprecated_env_vars)

# Hard coded flags
object.__setattr__(self, "WHICH", ctx.info_name)
# Set deprecated_env_var_warnings to be fired later after events have been init.
object.__setattr__(
self, "deprecated_env_var_warnings", [x for x in deprecated_env_vars.values()]
)

# Get the invoked command flags.
invoked_subcommand_name = (
ctx.invoked_subcommand if hasattr(ctx, "invoked_subcommand") else None
)
if invoked_subcommand_name is not None:
invoked_subcommand = getattr(import_module("dbt.cli.main"), invoked_subcommand_name)
invoked_subcommand.allow_extra_args = True
invoked_subcommand.ignore_unknown_options = True
invoked_subcommand_ctx = invoked_subcommand.make_context(None, sys.argv)
_assign_params(
invoked_subcommand_ctx, params_assigned_from_default, deprecated_env_vars
)

if not user_config:
profiles_dir = getattr(self, "PROFILES_DIR", None)
user_config = read_user_config(profiles_dir) if profiles_dir else None

# Overwrite default assignments with user config if available.
if user_config:
param_assigned_from_default_copy = params_assigned_from_default.copy()
for param_assigned_from_default in params_assigned_from_default:
user_config_param_value = getattr(user_config, param_assigned_from_default, None)
if user_config_param_value is not None:
object.__setattr__(
self,
param_assigned_from_default.upper(),
convert_config(param_assigned_from_default, user_config_param_value),
)
param_assigned_from_default_copy.remove(param_assigned_from_default)
params_assigned_from_default = param_assigned_from_default_copy

# Set hard coded flags.
object.__setattr__(self, "WHICH", invoked_subcommand_name or ctx.info_name)
object.__setattr__(self, "MP_CONTEXT", get_context("spawn"))

# Support console DO NOT TRACK initiave
if os.getenv("DO_NOT_TRACK", "").lower() in (1, "t", "true", "y", "yes"):
object.__setattr__(self, "ANONYMOUS_USAGE_STATS", False)
# Apply the lead/follow relationship between some parameters.
self._override_if_set("USE_COLORS", "USE_COLORS_FILE", params_assigned_from_default)
self._override_if_set("LOG_LEVEL", "LOG_LEVEL_FILE", params_assigned_from_default)
self._override_if_set("LOG_FORMAT", "LOG_FORMAT_FILE", params_assigned_from_default)

# Set default LOG_PATH from PROJECT_DIR, if available.
# Starting in v1.5, if `log-path` is set in `dbt_project.yml`, it will raise a deprecation warning,
# with the possibility of removing it in a future release.
if getattr(self, "LOG_PATH", None) is None:
project_dir = getattr(self, "PROJECT_DIR", default_project_dir())
version_check = getattr(self, "VERSION_CHECK", True)
object.__setattr__(self, "LOG_PATH", default_log_path(project_dir, version_check))

# Support console DO NOT TRACK initiative.
if os.getenv("DO_NOT_TRACK", "").lower() in ("1", "t", "true", "y", "yes"):
object.__setattr__(self, "SEND_ANONYMOUS_USAGE_STATS", False)

# Check mutual exclusivity once all flags are set.
self._assert_mutually_exclusive(
params_assigned_from_default, ["WARN_ERROR", "WARN_ERROR_OPTIONS"]
)

# Support lower cased access for legacy code.
params = set(
x for x in dir(self) if not callable(getattr(self, x)) and not x.startswith("__")
)
for param in params:
object.__setattr__(self, param.lower(), getattr(self, param))

def __str__(self) -> str:
return str(pf(self.__dict__))

def _override_if_set(self, lead: str, follow: str, defaulted: Set[str]) -> None:
"""If the value of the lead parameter was set explicitly, apply the value to follow, unless follow was also set explicitly."""
if lead.lower() not in defaulted and follow.lower() in defaulted:
object.__setattr__(self, follow.upper(), getattr(self, lead.upper(), None))

def _assert_mutually_exclusive(
self, params_assigned_from_default: Set[str], group: List[str]
) -> None:
"""
Ensure no elements from group are simultaneously provided by a user, as inferred from params_assigned_from_default.
Raises click.UsageError if any two elements from group are simultaneously provided by a user.
"""
set_flag = None
for flag in group:
flag_set_by_user = flag.lower() not in params_assigned_from_default
if flag_set_by_user and set_flag:
raise DbtUsageException(
f"{flag.lower()}: not allowed with argument {set_flag.lower()}"
)
elif flag_set_by_user:
set_flag = flag

def fire_deprecations(self):
"""Fires events for deprecated env_var usage."""
[dep_fn() for dep_fn in self.deprecated_env_var_warnings]
# It is necessary to remove this attr from the class so it does
# not get pickled when written to disk as json.
object.__delattr__(self, "deprecated_env_var_warnings")
@@ -1,22 +1,121 @@
import inspect # This is temporary for RAT-ing
from copy import copy
from pprint import pformat as pf # This is temporary for RAT-ing
from dataclasses import dataclass
from typing import Callable, List, Optional, Union

import click
from dbt.adapters.factory import adapter_management
from dbt.cli import params as p
from dbt.cli.flags import Flags
from dbt.profiler import profiler
from click.exceptions import (
Exit as ClickExit,
BadOptionUsage,
NoSuchOption,
UsageError,
)

from dbt.cli import requires, params as p
from dbt.cli.exceptions import (
DbtInternalException,
DbtUsageException,
)
from dbt.contracts.graph.manifest import Manifest
from dbt.contracts.results import (
CatalogArtifact,
RunExecutionResult,
RunOperationResultsArtifact,
)
from dbt.events.base_types import EventMsg
from dbt.task.build import BuildTask
from dbt.task.clean import CleanTask
from dbt.task.compile import CompileTask
from dbt.task.debug import DebugTask
from dbt.task.deps import DepsTask
from dbt.task.freshness import FreshnessTask
from dbt.task.generate import GenerateTask
from dbt.task.init import InitTask
from dbt.task.list import ListTask
from dbt.task.run import RunTask
from dbt.task.run_operation import RunOperationTask
from dbt.task.seed import SeedTask
from dbt.task.serve import ServeTask
from dbt.task.show import ShowTask
from dbt.task.snapshot import SnapshotTask
from dbt.task.test import TestTask


def cli_runner():
# Alias "list" to "ls"
ls = copy(cli.commands["list"])
ls.hidden = True
cli.add_command(ls, "ls")
@dataclass
class dbtRunnerResult:
"""Contains the result of an invocation of the dbtRunner"""

# Run the cli
cli()
success: bool

exception: Optional[BaseException] = None
result: Union[
bool, # debug
CatalogArtifact, # docs generate
List[str], # list/ls
Manifest, # parse
None, # clean, deps, init, source
RunExecutionResult, # build, compile, run, seed, snapshot, test
RunOperationResultsArtifact, # run-operation
] = None


# Programmatic invocation
class dbtRunner:
def __init__(
self,
manifest: Manifest = None,
callbacks: List[Callable[[EventMsg], None]] = None,
):
self.manifest = manifest

if callbacks is None:
callbacks = []
self.callbacks = callbacks

def invoke(self, args: List[str], **kwargs) -> dbtRunnerResult:
try:
dbt_ctx = cli.make_context(cli.name, args)
dbt_ctx.obj = {
"manifest": self.manifest,
"callbacks": self.callbacks,
}

for key, value in kwargs.items():
dbt_ctx.params[key] = value
# Hack to set parameter source to custom string
dbt_ctx.set_parameter_source(key, "kwargs") # type: ignore

result, success = cli.invoke(dbt_ctx)
return dbtRunnerResult(
result=result,
success=success,
)
except requires.ResultExit as e:
return dbtRunnerResult(
result=e.result,
success=False,
)
except requires.ExceptionExit as e:
return dbtRunnerResult(
exception=e.exception,
success=False,
)
except (BadOptionUsage, NoSuchOption, UsageError) as e:
return dbtRunnerResult(
exception=DbtUsageException(e.message),
success=False,
)
except ClickExit as e:
if e.exit_code == 0:
return dbtRunnerResult(success=True)
return dbtRunnerResult(
exception=DbtInternalException(f"unhandled exit code {e.exit_code}"),
success=False,
)
except BaseException as e:
return dbtRunnerResult(
exception=e,
success=False,
)
# dbt
@@ -27,21 +126,29 @@ def cli_runner():
epilog="Specify one of these sub-commands and you can find more help from there.",
)
@click.pass_context
@p.anonymous_usage_stats
@p.cache_selected_only
@p.debug
@p.deprecated_print
@p.enable_legacy_logger
@p.fail_fast
@p.log_cache_events
@p.log_format
@p.log_format_file
@p.log_level
@p.log_level_file
@p.log_path
@p.macro_debugging
@p.partial_parse
@p.populate_cache
@p.print
@p.printer_width
@p.quiet
@p.record_timing_info
@p.send_anonymous_usage_stats
@p.single_threaded
@p.static_parser
@p.use_colors
@p.use_colors_file
@p.use_experimental_parser
@p.version
@p.version_check

@@ -52,49 +159,51 @@ def cli(ctx, **kwargs):
"""An ELT tool for managing your SQL transformations and data models.
For more documentation on these commands, visit: docs.getdbt.com
"""
incomplete_flags = Flags()

# Profiling
if incomplete_flags.RECORD_TIMING_INFO:
ctx.with_resource(profiler(enable=True, outfile=incomplete_flags.RECORD_TIMING_INFO))

# Adapter management
ctx.with_resource(adapter_management())

# Version info
if incomplete_flags.VERSION:
click.echo(f"`version` called\n ctx.params: {pf(ctx.params)}")
return
else:
del ctx.params["version"]


# dbt build
@cli.command("build")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.exclude
@p.fail_fast
@p.favor_state
@p.deprecated_favor_state
@p.full_refresh
@p.indirect_selection
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.resource_type
@p.select
@p.selector
@p.show
@p.state
@p.deprecated_state
@p.store_failures
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def build(ctx, **kwargs):
"""Run all Seeds, Models, Snapshots, and tests in DAG order"""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
"""Run all seeds, models, snapshots, and tests in DAG order"""
task = BuildTask(
ctx.obj["flags"],
ctx.obj["runtime_config"],
ctx.obj["manifest"],
)

results = task.run()
success = task.interpret_results(results)
return results, success


# dbt clean
@@ -104,11 +213,19 @@ def build(ctx, **kwargs):
@p.profiles_dir
@p.project_dir
@p.target
@p.target_path
@p.vars
@requires.postflight
@requires.preflight
@requires.unset_profile
@requires.project
def clean(ctx, **kwargs):
"""Delete all folders in the clean-targets list (usually the dbt_packages and target directories.)"""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
task = CleanTask(ctx.obj["flags"], ctx.obj["project"])

results = task.run()
success = task.interpret_results(results)
return results, success


# dbt docs
@@ -123,23 +240,39 @@ def docs(ctx, **kwargs):
|
||||
@click.pass_context
|
||||
@p.compile_docs
|
||||
@p.defer
|
||||
@p.deprecated_defer
|
||||
@p.exclude
|
||||
@p.log_path
|
||||
@p.models
|
||||
@p.favor_state
|
||||
@p.deprecated_favor_state
|
||||
@p.profile
|
||||
@p.profiles_dir
|
||||
@p.project_dir
|
||||
@p.select
|
||||
@p.selector
|
||||
@p.state
|
||||
@p.deprecated_state
|
||||
@p.target
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@requires.project
|
||||
@requires.runtime_config
|
||||
@requires.manifest(write=False)
|
||||
def docs_generate(ctx, **kwargs):
|
||||
"""Generate the documentation website for your project"""
|
||||
flags = Flags()
|
||||
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
|
||||
task = GenerateTask(
|
||||
ctx.obj["flags"],
|
||||
ctx.obj["runtime_config"],
|
||||
ctx.obj["manifest"],
|
||||
)
|
||||
|
||||
results = task.run()
|
||||
success = task.interpret_results(results)
|
||||
return results, success
|
||||
|
||||
|
||||
# dbt docs serve
|
||||
@@ -151,36 +284,114 @@ def docs_generate(ctx, **kwargs):
|
||||
@p.profiles_dir
|
||||
@p.project_dir
|
||||
@p.target
|
||||
@p.target_path
|
||||
@p.vars
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@requires.project
|
||||
@requires.runtime_config
|
||||
def docs_serve(ctx, **kwargs):
|
||||
"""Serve the documentation website for your project"""
|
||||
flags = Flags()
|
||||
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
|
||||
task = ServeTask(
|
||||
ctx.obj["flags"],
|
||||
ctx.obj["runtime_config"],
|
||||
)
|
||||
|
||||
results = task.run()
|
||||
success = task.interpret_results(results)
|
||||
return results, success
|
||||
|
||||
|
||||
# dbt compile
|
||||
@cli.command("compile")
|
||||
@click.pass_context
|
||||
@p.defer
|
||||
@p.deprecated_defer
|
||||
@p.exclude
|
||||
@p.favor_state
|
||||
@p.deprecated_favor_state
|
||||
@p.full_refresh
|
||||
@p.log_path
|
||||
@p.models
|
||||
@p.parse_only
|
||||
@p.show_output_format
|
||||
@p.indirect_selection
|
||||
@p.introspect
|
||||
@p.profile
|
||||
@p.profiles_dir
|
||||
@p.project_dir
|
||||
@p.select
|
||||
@p.selector
|
||||
@p.inline
|
||||
@p.state
|
||||
@p.deprecated_state
|
||||
@p.target
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@requires.project
|
||||
@requires.runtime_config
|
||||
@requires.manifest
|
||||
def compile(ctx, **kwargs):
|
||||
"""Generates executable SQL from source, model, test, and analysis files. Compiled SQL files are written to the target/ directory."""
|
||||
flags = Flags()
|
||||
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
|
||||
"""Generates executable SQL from source, model, test, and analysis files. Compiled SQL files are written to the
|
||||
target/ directory."""
|
||||
task = CompileTask(
|
||||
ctx.obj["flags"],
|
||||
ctx.obj["runtime_config"],
|
||||
ctx.obj["manifest"],
|
||||
)
|
||||
|
||||
results = task.run()
|
||||
success = task.interpret_results(results)
|
||||
return results, success
|
||||
|
||||
|
||||
# dbt show
|
||||
@cli.command("show")
|
||||
@click.pass_context
|
||||
@p.defer
|
||||
@p.deprecated_defer
|
||||
@p.exclude
|
||||
@p.favor_state
|
||||
@p.deprecated_favor_state
|
||||
@p.full_refresh
|
||||
@p.show_output_format
|
||||
@p.show_limit
|
||||
@p.indirect_selection
|
||||
@p.introspect
|
||||
@p.profile
|
||||
@p.profiles_dir
|
||||
@p.project_dir
|
||||
@p.select
|
||||
@p.selector
|
||||
@p.inline
|
||||
@p.state
|
||||
@p.deprecated_state
|
||||
@p.target
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@requires.project
|
||||
@requires.runtime_config
|
||||
@requires.manifest
|
||||
def show(ctx, **kwargs):
|
||||
"""Generates executable SQL for a named resource or inline query, runs that SQL, and returns a preview of the
|
||||
results. Does not materialize anything to the warehouse."""
|
||||
task = ShowTask(
|
||||
ctx.obj["flags"],
|
||||
ctx.obj["runtime_config"],
|
||||
ctx.obj["manifest"],
|
||||
)
|
||||
|
||||
results = task.run()
|
||||
success = task.interpret_results(results)
|
||||
return results, success
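Given the options declared above (--select, --inline, --limit, --output), hypothetical invocations of the new command look like:

    # preview a named model, capped at ten rows
    #   dbt show --select my_model --limit 10
    # preview ad-hoc SQL without materializing anything
    #   dbt show --inline "select * from {{ ref('my_model') }}" --output json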

# dbt debug
@@ -188,44 +399,66 @@ def compile(ctx, **kwargs):
@click.pass_context
@p.config_dir
@p.profile
@p.profiles_dir
@p.profiles_dir_exists_false
@p.project_dir
@p.target
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
def debug(ctx, **kwargs):
    """Show some helpful information about dbt for debugging. Not to be confused with the --debug option which increases verbosity."""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    """Test the database connection and show information for debugging purposes. Not to be confused with the --debug option which increases verbosity."""

    task = DebugTask(
        ctx.obj["flags"],
        None,
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt deps
@cli.command("deps")
@click.pass_context
@p.profile
@p.profiles_dir
@p.profiles_dir_exists_false
@p.project_dir
@p.target
@p.vars
@requires.postflight
@requires.preflight
@requires.unset_profile
@requires.project
def deps(ctx, **kwargs):
    """Pull the most recent version of the dependencies listed in packages.yml"""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    task = DepsTask(ctx.obj["flags"], ctx.obj["project"])
    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt init
@cli.command("init")
@click.pass_context
# for backwards compatibility, accept 'project_name' as an optional positional argument
@click.argument("project_name", required=False)
@p.profile
@p.profiles_dir
@p.profiles_dir_exists_false
@p.project_dir
@p.skip_profile_setup
@p.target
@p.vars
@requires.postflight
@requires.preflight
def init(ctx, **kwargs):
    """Initialize a new DBT project."""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    """Initialize a new dbt project."""
    task = InitTask(ctx.obj["flags"], None)

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt list
@@ -240,21 +473,41 @@ def init(ctx, **kwargs):
@p.profiles_dir
@p.project_dir
@p.resource_type
@p.raw_select
@p.selector
@p.state
@p.deprecated_state
@p.target
@p.target_path
@p.vars
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def list(ctx, **kwargs):
    """List the resources in your project"""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    task = ListTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# Alias "list" to "ls"
ls = copy(cli.commands["list"])
ls.hidden = True
cli.add_command(ls, "ls")
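The alias works because click commands are ordinary objects: copying the registered Command and re-adding it under a second name shares the same callback. The same pattern in isolation (a sketch, not from this diff):

    import click
    from copy import copy

    @click.group()
    def app():
        pass

    @app.command("list")
    def list_cmd():
        click.echo("listing")

    ls = copy(app.commands["list"])
    ls.hidden = True  # keep the alias out of --help output
    app.add_command(ls, "ls")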

# dbt parse
@cli.command("parse")
@click.pass_context
@p.compile_parse
@p.log_path
@p.profile
@p.profiles_dir
@p.project_dir
@@ -263,51 +516,88 @@ def list(ctx, **kwargs):
@p.threads
@p.vars
@p.version_check
@p.write_manifest
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest(write_perf_info=True)
def parse(ctx, **kwargs):
    """Parses the project and provides information on performance"""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    # manifest generation and writing happens in @requires.manifest

    return ctx.obj["manifest"], True


# dbt run
@cli.command("run")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.favor_state
@p.deprecated_favor_state
@p.exclude
@p.fail_fast
@p.full_refresh
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def run(ctx, **kwargs):
    """Compile SQL and execute against the current target database."""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    task = RunTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt run operation
@cli.command("run-operation")
@click.pass_context
@click.argument("macro")
@p.args
@p.profile
@p.profiles_dir
@p.project_dir
@p.target
@p.target_path
@p.vars
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def run_operation(ctx, **kwargs):
    """Run the named macro with any supplied arguments."""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    task = RunOperationTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success
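Since --args is parsed as YAML (see the YAML param type below), a hypothetical invocation passing keyword arguments to a macro looks like:

    #   dbt run-operation grant_select --args '{role: reporter}'
    # (macro name and argument are illustrative only)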

# dbt seed
@@ -315,43 +605,73 @@ def run_operation(ctx, **kwargs):
@click.pass_context
@p.exclude
@p.full_refresh
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.show
@p.state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def seed(ctx, **kwargs):
    """Load data from csv files into your data warehouse."""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    task = SeedTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )
    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt snapshot
@cli.command("snapshot")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.exclude
@p.models
@p.favor_state
@p.deprecated_favor_state
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def snapshot(ctx, **kwargs):
    """Execute snapshots defined in your project"""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    task = SnapshotTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt source
@@ -365,48 +685,85 @@ def source(ctx, **kwargs):
@source.command("freshness")
@click.pass_context
@p.exclude
@p.models
@p.output_path  # TODO: Is this ok to re-use? We have three different output params, how much can we consolidate?
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def freshness(ctx, **kwargs):
    """Snapshots the current freshness of the project's sources"""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    """check the current freshness of the project's sources"""
    task = FreshnessTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# Alias "source freshness" to "snapshot-freshness"
snapshot_freshness = copy(cli.commands["source"].commands["freshness"])  # type: ignore
snapshot_freshness.hidden = True
cli.commands["source"].add_command(snapshot_freshness, "snapshot-freshness")  # type: ignore


# dbt test
@cli.command("test")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.exclude
@p.fail_fast
@p.favor_state
@p.deprecated_favor_state
@p.indirect_selection
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.deprecated_state
@p.store_failures
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def test(ctx, **kwargs):
    """Runs tests on data in deployed models. Run this after `dbt run`"""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
    task = TestTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# Support running as a module
if __name__ == "__main__":
    cli_runner()
    cli()

core/dbt/cli/option_types.py
@@ -1,5 +1,7 @@
from click import ParamType
import yaml
from click import ParamType, Choice

from dbt.config.utils import parse_cli_yaml_string
from dbt.exceptions import ValidationError, DbtValidationError, OptionNotYamlDictError

from dbt.helper_types import WarnErrorOptions

@@ -14,8 +16,9 @@ class YAML(ParamType):
        if not isinstance(value, str):
            self.fail(f"Cannot load YAML from type {type(value)}", param, ctx)
        try:
            return yaml.load(value, Loader=yaml.Loader)
        except yaml.parser.ParserError:
            param_option_name = param.opts[0] if param.opts else param.name
            return parse_cli_yaml_string(value, param_option_name.strip("-"))
        except (ValidationError, DbtValidationError, OptionNotYamlDictError):
            self.fail(f"String '{value}' is not valid YAML", param, ctx)
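The net effect: a YAML-typed option is parsed through dbt's stricter parse_cli_yaml_string and fails with a click usage error otherwise. A rough sketch of the happy path (assumed direct call, outside normal click parsing):

    from dbt.cli.option_types import YAML

    YAML().convert("{my_var: 1}", None, None)  # -> {"my_var": 1}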

@@ -25,6 +28,7 @@ class WarnErrorOptionsType(YAML):
    name = "WarnErrorOptionsType"

    def convert(self, value, param, ctx):
        # this function is being used by param in click
        include_exclude = super().convert(value, param, ctx)

        return WarnErrorOptions(
@@ -46,3 +50,13 @@ class Truthy(ParamType):
            return None
        else:
            return value


class ChoiceTuple(Choice):
    name = "CHOICE_TUPLE"

    def convert(self, value, param, ctx):
        for value_item in value:
            super().convert(value_item, param, ctx)

        return value
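ChoiceTuple validates every element of an already-assembled tuple against the allowed choices and returns the tuple unchanged, which is exactly what MultiOption (below) needs for multi-valued flags. A sketch of the behaviour (assumed usage):

    from dbt.cli.option_types import ChoiceTuple

    ct = ChoiceTuple(["model", "seed"], case_sensitive=False)
    ct.convert(("model", "seed"), None, None)  # -> ("model", "seed")
    ct.convert(("nope",), None, None)          # fails with a click usage error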

core/dbt/cli/options.py (new file, 75 lines)
@@ -0,0 +1,75 @@
import click
import inspect
import typing as t
from click import Context
from dbt.cli.option_types import ChoiceTuple


# Implementation from: https://stackoverflow.com/a/48394004
# Note MultiOption options must be specified with type=tuple or type=ChoiceTuple (https://github.com/pallets/click/issues/2012)
class MultiOption(click.Option):
    def __init__(self, *args, **kwargs):
        self.save_other_options = kwargs.pop("save_other_options", True)
        nargs = kwargs.pop("nargs", -1)
        assert nargs == -1, "nargs, if set, must be -1 not {}".format(nargs)
        super(MultiOption, self).__init__(*args, **kwargs)
        self._previous_parser_process = None
        self._eat_all_parser = None

        # validate that multiple=True
        multiple = kwargs.pop("multiple", None)
        msg = f"MultiOption named `{self.name}` must have multiple=True (rather than {multiple})"
        assert multiple, msg

        # validate that type=tuple or type=ChoiceTuple
        option_type = kwargs.pop("type", None)
        msg = f"MultiOption named `{self.name}` must be tuple or ChoiceTuple (rather than {option_type})"
        if inspect.isclass(option_type):
            assert issubclass(option_type, tuple), msg
        else:
            assert isinstance(option_type, ChoiceTuple), msg

    def add_to_parser(self, parser, ctx):
        def parser_process(value, state):
            # method to hook to the parser.process
            done = False
            value = [value]
            if self.save_other_options:
                # grab everything up to the next option
                while state.rargs and not done:
                    for prefix in self._eat_all_parser.prefixes:
                        if state.rargs[0].startswith(prefix):
                            done = True
                    if not done:
                        value.append(state.rargs.pop(0))
            else:
                # grab everything remaining
                value += state.rargs
                state.rargs[:] = []
            value = tuple(value)
            # call the actual process
            self._previous_parser_process(value, state)

        retval = super(MultiOption, self).add_to_parser(parser, ctx)
        for name in self.opts:
            our_parser = parser._long_opt.get(name) or parser._short_opt.get(name)
            if our_parser:
                self._eat_all_parser = our_parser
                self._previous_parser_process = our_parser.process
                our_parser.process = parser_process
                break
        return retval

    def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any:
        def flatten(data):
            if isinstance(data, tuple):
                for x in data:
                    yield from flatten(x)
            else:
                yield data

        # there will be nested tuples to flatten when multiple=True
        value = super(MultiOption, self).type_cast_value(ctx, value)
        if value:
            value = tuple(flatten(value))
        return value
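MultiOption lets one flag greedily consume space-separated values, where click's plain multiple=True would require repeating the flag for each value. Declaring one looks like this (a sketch mirroring how params.py uses it below):

    import click
    from dbt.cli.options import MultiOption

    @click.command()
    @click.option("--select", cls=MultiOption, multiple=True, type=tuple)
    def cmd(select):
        click.echo(select)

    # `cmd --select a b c` yields select == ("a", "b", "c")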

core/dbt/cli/params.py
@@ -1,20 +1,10 @@
from pathlib import Path, PurePath
from pathlib import Path

import click
from dbt.cli.option_types import YAML, WarnErrorOptionsType
from dbt.cli.options import MultiOption
from dbt.cli.option_types import YAML, ChoiceTuple, WarnErrorOptionsType
from dbt.cli.resolvers import default_project_dir, default_profiles_dir


# TODO: The name (reflected in flags) is a correction!
# The original name was `SEND_ANONYMOUS_USAGE_STATS` and used an env var called "DBT_SEND_ANONYMOUS_USAGE_STATS"
# Both of which break existing naming conventions (doesn't match param flag).
# This will need to be fixed before use in the main codebase and communicated as a change to the community!
anonymous_usage_stats = click.option(
    "--anonymous-usage-stats/--no-anonymous-usage-stats",
    envvar="DBT_ANONYMOUS_USAGE_STATS",
    help="Send anonymous usage stats to dbt Labs.",
    default=True,
)
from dbt.version import get_version_information

args = click.option(
    "--args",
@@ -33,28 +23,28 @@ browser = click.option(
cache_selected_only = click.option(
    "--cache-selected-only/--no-cache-selected-only",
    envvar="DBT_CACHE_SELECTED_ONLY",
    help="Pre cache database objects relevant to selected resource only.",
    help="At start of run, populate relational cache only for schemas containing selected nodes, or for all schemas of interest.",
)

introspect = click.option(
    "--introspect/--no-introspect",
    envvar="DBT_INTROSPECT",
    help="Whether to scaffold introspective queries as part of compilation",
    default=True,
)

compile_docs = click.option(
    "--compile/--no-compile",
    envvar=None,
    help="Whether or not to run 'dbt compile' as part of docs generation",
    default=True,
)

compile_parse = click.option(
    "--compile/--no-compile",
    envvar=None,
    help="TODO: No help text currently available",
    help="Whether or not to run 'dbt compile' as part of docs generation",
    default=True,
)

config_dir = click.option(
    "--config-dir",
    envvar=None,
    help="If specified, DBT will show path information for this project",
    type=click.STRING,
    help="Show the configured location for the profiles.yml file and exit",
    is_flag=True,
)

debug = click.option(
@@ -64,14 +54,19 @@ debug = click.option(
    help="Display debug logging during dbt execution. Useful for debugging and making bug reports.",
)

# TODO: The env var and name (reflected in flags) are corrections!
# The original name was `DEFER_MODE` and used an env var called "DBT_DEFER_TO_STATE"
# Both of which break existing naming conventions.
# This will need to be fixed before use in the main codebase and communicated as a change to the community!
# flag was previously named DEFER_MODE
defer = click.option(
    "--defer/--no-defer",
    envvar="DBT_DEFER",
    help="If set, defer to the state variable for resolving unselected nodes.",
    help="If set, resolve unselected nodes by deferring to the manifest within the --state directory.",
)

deprecated_defer = click.option(
    "--deprecated-defer",
    envvar="DBT_DEFER_TO_STATE",
    help="Internal flag for deprecating old env var.",
    default=False,
    hidden=True,
)

enable_legacy_logger = click.option(
@@ -80,7 +75,14 @@ enable_legacy_logger = click.option(
    hidden=True,
)

exclude = click.option("--exclude", envvar=None, help="Specify the nodes to exclude.")
exclude = click.option(
    "--exclude",
    envvar=None,
    type=tuple,
    cls=MultiOption,
    multiple=True,
    help="Specify the nodes to exclude.",
)

fail_fast = click.option(
    "--fail-fast/--no-fail-fast",
@@ -89,6 +91,18 @@ fail_fast = click.option(
    help="Stop execution on first failure.",
)

favor_state = click.option(
    "--favor-state/--no-favor-state",
    envvar="DBT_FAVOR_STATE",
    help="If set, defer to the argument provided to the state flag for resolving unselected nodes, even if the node(s) exist as a database object in the current environment.",
)

deprecated_favor_state = click.option(
    "--deprecated-favor-state",
    envvar="DBT_FAVOR_STATE_MODE",
    help="Internal flag for deprecating old env var.",
)

full_refresh = click.option(
    "--full-refresh",
    "-f",
@@ -100,30 +114,69 @@ full_refresh = click.option(
indirect_selection = click.option(
    "--indirect-selection",
    envvar="DBT_INDIRECT_SELECTION",
    help="Select all tests that are adjacent to selected resources, even if those resources have been explicitly selected.",
    type=click.Choice(["eager", "cautious"], case_sensitive=False),
    help="Choose which tests to select that are adjacent to selected resources. Eager is most inclusive, cautious is most exclusive, and buildable is in between. Empty includes no tests at all.",
    type=click.Choice(["eager", "cautious", "buildable", "empty"], case_sensitive=False),
    default="eager",
)

log_cache_events = click.option(
    "--log-cache-events/--no-log-cache-events",
    help="Enable verbose adapter cache logging.",
    help="Enable verbose logging for relational cache events to help when debugging.",
    envvar="DBT_LOG_CACHE_EVENTS",
)

log_format = click.option(
    "--log-format",
    envvar="DBT_LOG_FORMAT",
    help="Specify the log format, overriding the command's default.",
    type=click.Choice(["text", "json", "default"], case_sensitive=False),
    help="Specify the format of logging to the console and the log file. Use --log-format-file to configure the format for the log file differently than the console.",
    type=click.Choice(["text", "debug", "json", "default"], case_sensitive=False),
    default="default",
)

log_format_file = click.option(
    "--log-format-file",
    envvar="DBT_LOG_FORMAT_FILE",
    help="Specify the format of logging to the log file by overriding the default value and the general --log-format setting.",
    type=click.Choice(["text", "debug", "json", "default"], case_sensitive=False),
    default="debug",
)

log_level = click.option(
    "--log-level",
    envvar="DBT_LOG_LEVEL",
    help="Specify the minimum severity of events that are logged to the console and the log file. Use --log-level-file to configure the severity for the log file differently than the console.",
    type=click.Choice(["debug", "info", "warn", "error", "none"], case_sensitive=False),
    default="info",
)

log_level_file = click.option(
    "--log-level-file",
    envvar="DBT_LOG_LEVEL_FILE",
    help="Specify the minimum severity of events that are logged to the log file by overriding the default value and the general --log-level setting.",
    type=click.Choice(["debug", "info", "warn", "error", "none"], case_sensitive=False),
    default="debug",
)

use_colors = click.option(
    "--use-colors/--no-use-colors",
    envvar="DBT_USE_COLORS",
    help="Specify whether log output is colorized in the console and the log file. Use --use-colors-file/--no-use-colors-file to colorize the log file differently than the console.",
    default=True,
)

use_colors_file = click.option(
    "--use-colors-file/--no-use-colors-file",
    envvar="DBT_USE_COLORS_FILE",
    help="Specify whether log file output is colorized by overriding the default value and the general --use-colors/--no-use-colors setting.",
    default=True,
)

log_path = click.option(
    "--log-path",
    envvar="DBT_LOG_PATH",
    help="Configure the 'log-path'. Only applies this setting for the current run. Overrides the 'DBT_LOG_PATH' if it is set.",
    type=click.Path(),
    default=None,
    type=click.Path(resolve_path=True, path_type=Path),
)
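Together these options split the console and the log file into independently tunable channels. A hypothetical invocation:

    # terse console, verbose structured log file
    #   dbt --log-level warn --log-level-file debug --log-format-file json run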

macro_debugging = click.option(
@@ -132,41 +185,51 @@ macro_debugging = click.option(
    hidden=True,
)

models = click.option(
    "-m",
    "-s",
    "models",
    envvar=None,
    help="Specify the nodes to include.",
    multiple=True,
)

# This less standard usage of --output where output_path below is more standard
output = click.option(
    "--output",
    envvar=None,
    help="TODO: No current help text",
    help="Specify the output format: either JSON or a newline-delimited list of selectors, paths, or names",
    type=click.Choice(["json", "name", "path", "selector"], case_sensitive=False),
    default="name",
    default="selector",
)

show_output_format = click.option(
    "--output",
    envvar=None,
    help="Output format for dbt compile and dbt show",
    type=click.Choice(["json", "text"], case_sensitive=False),
    default="text",
)

show_limit = click.option(
    "--limit",
    envvar=None,
    help="Limit the number of results returned by dbt show",
    type=click.INT,
    default=5,
)

output_keys = click.option(
    "--output-keys", envvar=None, help="TODO: No current help text", type=click.STRING
    "--output-keys",
    envvar=None,
    help=(
        "Space-delimited listing of node properties to include as custom keys for JSON output "
        "(e.g. `--output json --output-keys name resource_type description`)"
    ),
    type=tuple,
    cls=MultiOption,
    multiple=True,
    default=[],
)

output_path = click.option(
    "--output",
    "-o",
    envvar=None,
    help="Specify the output path for the json report. By default, outputs to 'target/sources.json'",
    help="Specify the output path for the JSON report. By default, outputs to 'target/sources.json'",
    type=click.Path(file_okay=True, dir_okay=False, writable=True),
    default=PurePath.joinpath(Path.cwd(), "target/sources.json"),
)

parse_only = click.option(
    "--parse-only",
    envvar=None,
    help="TODO: No help text currently available",
    is_flag=True,
    default=None,
)

partial_parse = click.option(
@@ -176,6 +239,13 @@ partial_parse = click.option(
    default=True,
)

populate_cache = click.option(
    "--populate-cache/--no-populate-cache",
    envvar="DBT_POPULATE_CACHE",
    help="At start of run, use `show` or `information_schema` queries to populate a relational cache, which can speed up subsequent materializations.",
    default=True,
)

port = click.option(
    "--port",
    envvar=None,
@@ -184,10 +254,6 @@ port = click.option(
    type=click.INT,
)

# TODO: The env var and name (reflected in flags) are corrections!
# The original name was `NO_PRINT` and used the env var `DBT_NO_PRINT`.
# Both of which break existing naming conventions.
# This will need to be fixed before use in the main codebase and communicated as a change to the community!
print = click.option(
    "--print/--no-print",
    envvar="DBT_PRINT",
@@ -195,6 +261,15 @@ print = click.option(
    default=True,
)

deprecated_print = click.option(
    "--deprecated-print/--deprecated-no-print",
    envvar="DBT_NO_PRINT",
    help="Internal flag for deprecating old env var.",
    default=True,
    hidden=True,
    callback=lambda ctx, param, value: not value,
)

printer_width = click.option(
    "--printer-width",
    envvar="DBT_PRINTER_WIDTH",
@@ -213,20 +288,32 @@ profiles_dir = click.option(
    "--profiles-dir",
    envvar="DBT_PROFILES_DIR",
    help="Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/",
    default=default_profiles_dir(),
    default=default_profiles_dir,
    type=click.Path(exists=True),
)

# `dbt debug` uses this because it implements custom behaviour for non-existent profiles.yml directories
# `dbt deps` does not load a profile at all
# `dbt init` will write profiles.yml if it doesn't yet exist
profiles_dir_exists_false = click.option(
    "--profiles-dir",
    envvar="DBT_PROFILES_DIR",
    help="Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/",
    default=default_profiles_dir,
    type=click.Path(exists=False),
)

project_dir = click.option(
    "--project-dir",
    envvar=None,
    envvar="DBT_PROJECT_DIR",
    help="Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.",
    default=default_project_dir(),
    default=default_project_dir,
    type=click.Path(exists=True),
)

quiet = click.option(
    "--quiet/--no-quiet",
    "-q",
    envvar="DBT_QUIET",
    help="Suppress all non-error logging to stdout. Does not affect {{ print() }} macro calls.",
)
@@ -240,10 +327,11 @@ record_timing_info = click.option(
)

resource_type = click.option(
    "--resource-types",
    "--resource-type",
    envvar=None,
    help="TODO: No current help text",
    type=click.Choice(
    help="Restricts the types of resources that dbt will include",
    type=ChoiceTuple(
        [
            "metric",
            "source",
@@ -258,35 +346,100 @@ resource_type = click.option(
        ],
        case_sensitive=False,
    ),
    default="default",
    cls=MultiOption,
    multiple=True,
    default=(),
)

model_decls = ("-m", "--models", "--model")
select_decls = ("-s", "--select")
select_attrs = {
    "envvar": None,
    "help": "Specify the nodes to include.",
    "cls": MultiOption,
    "multiple": True,
    "type": tuple,
}

inline = click.option(
    "--inline",
    envvar=None,
    help="Pass SQL inline to dbt compile and show",
)

# `--select` and `--models` are analogous for most commands except `dbt list` for legacy reasons.
# Most CLI arguments should use the combined `select` option that aliases `--models` to `--select`.
# However, if you need to split out these separators (like `dbt ls`), use the `models` and `raw_select` options instead.
# See https://github.com/dbt-labs/dbt-core/pull/6774#issuecomment-1408476095 for more info.
models = click.option(*model_decls, **select_attrs)
raw_select = click.option(*select_decls, **select_attrs)
select = click.option(*select_decls, *model_decls, **select_attrs)
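Because `select` is declared with both sets of flags, these hypothetical invocations all bind to the same parameter:

    #   dbt run --select my_model
    #   dbt run -s my_model
    #   dbt run --models my_model   # legacy spelling, same destination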

selector = click.option(
    "--selector", envvar=None, help="The selector name to use, as defined in selectors.yml"
    "--selector",
    envvar=None,
    help="The selector name to use, as defined in selectors.yml",
)

send_anonymous_usage_stats = click.option(
    "--send-anonymous-usage-stats/--no-send-anonymous-usage-stats",
    envvar="DBT_SEND_ANONYMOUS_USAGE_STATS",
    help="Send anonymous usage stats to dbt Labs.",
    default=True,
)

show = click.option(
    "--show", envvar=None, help="Show a sample of the loaded data in the terminal", is_flag=True
    "--show",
    envvar=None,
    help="Show a sample of the loaded data in the terminal",
    is_flag=True,
)

# TODO: The env var is a correction!
# The original env var was `DBT_TEST_SINGLE_THREADED`.
# This broke the existing naming convention.
# This will need to be communicated as a change to the community!
#
# N.B. This flag is only used for testing, hence it's hidden from help text.
single_threaded = click.option(
    "--single-threaded/--no-single-threaded",
    envvar="DBT_SINGLE_THREADED",
    default=False,
    hidden=True,
)

skip_profile_setup = click.option(
    "--skip-profile-setup", "-s", envvar=None, help="Skip interactive profile setup.", is_flag=True
    "--skip-profile-setup",
    "-s",
    envvar=None,
    help="Skip interactive profile setup.",
    is_flag=True,
)

# TODO: The env var and name (reflected in flags) are corrections!
# The original name was `ARTIFACT_STATE_PATH` and used the env var `DBT_ARTIFACT_STATE_PATH`.
# Both of which break existing naming conventions.
# This will need to be fixed before use in the main codebase and communicated as a change to the community!
state = click.option(
    "--state",
    envvar="DBT_STATE",
    help="If set, use the given directory as the source for json files to compare with this project.",
    help="If set, use the given directory as the source for JSON files to compare with this project.",
    type=click.Path(
        dir_okay=True,
        file_okay=False,
        readable=True,
        resolve_path=False,
        path_type=Path,
    ),
)

deprecated_state = click.option(
    "--deprecated-state",
    envvar="DBT_ARTIFACT_STATE_PATH",
    help="Internal flag for deprecating old env var.",
    hidden=True,
    type=click.Path(
        dir_okay=True,
        exists=True,
        file_okay=False,
        readable=True,
        resolve_path=True,
        path_type=Path,
    ),
)
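--state pairs with the --defer and --favor-state options defined earlier: pointing dbt at a directory of artifacts from a previous run lets unselected nodes resolve against that manifest. A hypothetical invocation:

    #   dbt run --select state:modified+ --defer --state path/to/prod-artifacts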

@@ -305,7 +458,10 @@ store_failures = click.option(
)

target = click.option(
    "--target", "-t", envvar=None, help="Which target to load for the given profile"
    "--target",
    "-t",
    envvar=None,
    help="Which target to load for the given profile",
)

target_path = click.option(
@@ -319,17 +475,10 @@ threads = click.option(
    "--threads",
    envvar=None,
    help="Specify number of threads to use while executing models. Overrides settings in profiles.yml.",
    default=1,
    default=None,
    type=click.INT,
)

use_colors = click.option(
    "--use-colors/--no-use-colors",
    envvar="DBT_USE_COLORS",
    help="Output is colorized by default and may also be set in a profile or at the command line.",
    default=True,
)

use_experimental_parser = click.option(
    "--use-experimental-parser/--no-use-experimental-parser",
    envvar="DBT_USE_EXPERIMENTAL_PARSER",
@@ -341,19 +490,35 @@ vars = click.option(
    envvar=None,
    help="Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. '{my_variable: my_value}'",
    type=YAML(),
    default="{}",
)


# TODO: when legacy flags are deprecated use
# click.version_option instead of a callback
def _version_callback(ctx, _param, value):
    if not value or ctx.resilient_parsing:
        return
    click.echo(get_version_information())
    ctx.exit()


version = click.option(
    "--version",
    "-V",
    "-v",
    callback=_version_callback,
    envvar=None,
    help="Show version information",
    expose_value=False,
    help="Show version information and exit",
    is_eager=True,
    is_flag=True,
)

version_check = click.option(
    "--version-check/--no-version-check",
    envvar="DBT_VERSION_CHECK",
    help="Ensure dbt's version matches the one specified in the dbt_project.yml file ('require-dbt-version')",
    help="If set, ensure the installed dbt version matches the require-dbt-version specified in the dbt_project.yml file (if any). Otherwise, allow them to differ.",
    default=True,
)

@@ -362,13 +527,13 @@ warn_error = click.option(
    envvar="DBT_WARN_ERROR",
    help="If dbt would normally warn, instead raise an exception. Examples include --select that selects nothing, deprecations, configurations with no associated models, invalid test configurations, and missing sources/refs in tests.",
    default=None,
    flag_value=True,
    is_flag=True,
)

warn_error_options = click.option(
    "--warn-error-options",
    envvar="DBT_WARN_ERROR_OPTIONS",
    default=None,
    default="{}",
    help="""If dbt would normally warn, instead raise an exception based on include/exclude configuration. Examples include --select that selects nothing, deprecations, configurations with no associated models, invalid test configurations,
    and missing sources/refs in tests. This argument should be a YAML string, with keys 'include' or 'exclude'. eg. '{"include": "all", "exclude": ["NoNodesForSelectionCriteria"]}'""",
    type=WarnErrorOptionsType(),
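WarnErrorOptionsType parses that YAML string into include/exclude sets, so a hypothetical invocation promoting all warnings except one event type looks like:

    #   dbt --warn-error-options '{"include": "all", "exclude": ["NoNodesForSelectionCriteria"]}' run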

@@ -377,13 +542,6 @@ warn_error_options = click.option(
write_json = click.option(
    "--write-json/--no-write-json",
    envvar="DBT_WRITE_JSON",
    help="Writing the manifest and run_results.json files to disk",
    default=True,
)

write_manifest = click.option(
    "--write-manifest/--no-write-manifest",
    envvar=None,
    help="TODO: No help text currently available",
    help="Whether or not to write the manifest.json and run_results.json files to the target directory",
    default=True,
)

core/dbt/cli/requires.py (new file, 258 lines)
@@ -0,0 +1,258 @@
import dbt.tracking
from dbt.version import installed as installed_version
from dbt.adapters.factory import adapter_management, register_adapter
from dbt.flags import set_flags, get_flag_dict
from dbt.cli.exceptions import (
    ExceptionExit,
    ResultExit,
)
from dbt.cli.flags import Flags
from dbt.config import RuntimeConfig
from dbt.config.runtime import load_project, load_profile, UnsetProfile
from dbt.events.functions import fire_event, LOG_VERSION, set_invocation_id, setup_event_logger
from dbt.events.types import (
    CommandCompleted,
    MainReportVersion,
    MainReportArgs,
    MainTrackingUserState,
)
from dbt.events.helpers import get_json_string_utcnow
from dbt.events.types import MainEncounteredError, MainStackTrace
from dbt.exceptions import DbtException, DbtProjectError, FailFastError
from dbt.parser.manifest import ManifestLoader, write_manifest
from dbt.profiler import profiler
from dbt.tracking import active_user, initialize_from_flags, track_run
from dbt.utils import cast_dict_to_dict_of_strings

from click import Context
from functools import update_wrapper
import time
import traceback


def preflight(func):
    def wrapper(*args, **kwargs):
        ctx = args[0]
        assert isinstance(ctx, Context)
        ctx.obj = ctx.obj or {}

        # Flags
        flags = Flags(ctx)
        ctx.obj["flags"] = flags
        set_flags(flags)

        # Logging
        callbacks = ctx.obj.get("callbacks", [])
        set_invocation_id()
        setup_event_logger(flags=flags, callbacks=callbacks)

        # Tracking
        initialize_from_flags(flags.SEND_ANONYMOUS_USAGE_STATS, flags.PROFILES_DIR)
        ctx.with_resource(track_run(run_command=flags.WHICH))

        # Now that we have our logger, fire away!
        fire_event(MainReportVersion(version=str(installed_version), log_version=LOG_VERSION))
        flags_dict_str = cast_dict_to_dict_of_strings(get_flag_dict())
        fire_event(MainReportArgs(args=flags_dict_str))

        # Deprecation warnings
        flags.fire_deprecations()

        if active_user is not None:  # mypy appeasement, always true
            fire_event(MainTrackingUserState(user_state=active_user.state()))

        # Profiling
        if flags.RECORD_TIMING_INFO:
            ctx.with_resource(profiler(enable=True, outfile=flags.RECORD_TIMING_INFO))

        # Adapter management
        ctx.with_resource(adapter_management())

        return func(*args, **kwargs)

    return update_wrapper(wrapper, func)


def postflight(func):
    """The decorator that handles all exception handling for the click commands.
    This decorator must be used before any other decorators that may throw an exception."""

    def wrapper(*args, **kwargs):
        ctx = args[0]
        start_func = time.perf_counter()
        success = False

        try:
            result, success = func(*args, **kwargs)
        except FailFastError as e:
            fire_event(MainEncounteredError(exc=str(e)))
            raise ResultExit(e.result)
        except DbtException as e:
            fire_event(MainEncounteredError(exc=str(e)))
            raise ExceptionExit(e)
        except BaseException as e:
            fire_event(MainEncounteredError(exc=str(e)))
            fire_event(MainStackTrace(stack_trace=traceback.format_exc()))
            raise ExceptionExit(e)
        finally:
            fire_event(
                CommandCompleted(
                    command=ctx.command_path,
                    success=success,
                    completed_at=get_json_string_utcnow(),
                    elapsed=time.perf_counter() - start_func,
                )
            )

        if not success:
            raise ResultExit(result)

        return (result, success)

    return update_wrapper(wrapper, func)
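In effect, postflight is the single choke point for exit behaviour: a failing command raises ResultExit (carrying the run results), an unexpected error raises ExceptionExit, and a CommandCompleted event fires either way. That is why every command in main.py simply returns (results, success) rather than calling sys.exit itself.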

# TODO: UnsetProfile is necessary for deps and clean to load a project.
# This decorator and its usage can be removed once https://github.com/dbt-labs/dbt-core/issues/6257 is closed.
def unset_profile(func):
    def wrapper(*args, **kwargs):
        ctx = args[0]
        assert isinstance(ctx, Context)

        profile = UnsetProfile()
        ctx.obj["profile"] = profile

        return func(*args, **kwargs)

    return update_wrapper(wrapper, func)


def profile(func):
    def wrapper(*args, **kwargs):
        ctx = args[0]
        assert isinstance(ctx, Context)

        flags = ctx.obj["flags"]
        # TODO: Generalize safe access to flags.THREADS:
        # https://github.com/dbt-labs/dbt-core/issues/6259
        threads = getattr(flags, "THREADS", None)
        profile = load_profile(flags.PROJECT_DIR, flags.VARS, flags.PROFILE, flags.TARGET, threads)
        ctx.obj["profile"] = profile

        return func(*args, **kwargs)

    return update_wrapper(wrapper, func)


def project(func):
    def wrapper(*args, **kwargs):
        ctx = args[0]
        assert isinstance(ctx, Context)

        # TODO: Decouple target from profile, and remove the need for profile here:
        # https://github.com/dbt-labs/dbt-core/issues/6257
        if not ctx.obj.get("profile"):
            raise DbtProjectError("profile required for project")

        flags = ctx.obj["flags"]
        project = load_project(
            flags.PROJECT_DIR, flags.VERSION_CHECK, ctx.obj["profile"], flags.VARS
        )
        ctx.obj["project"] = project

        if dbt.tracking.active_user is not None:
            project_id = None if project is None else project.hashed_name()

            dbt.tracking.track_project_id({"project_id": project_id})

        return func(*args, **kwargs)

    return update_wrapper(wrapper, func)


def runtime_config(func):
    """A decorator used by click command functions for generating a runtime
    config given a profile and project.
    """

    def wrapper(*args, **kwargs):
        ctx = args[0]
        assert isinstance(ctx, Context)

        req_strs = ["profile", "project"]
        reqs = [ctx.obj.get(req_str) for req_str in req_strs]

        if None in reqs:
            raise DbtProjectError("profile and project required for runtime_config")

        config = RuntimeConfig.from_parts(
            ctx.obj["project"],
            ctx.obj["profile"],
            ctx.obj["flags"],
        )

        ctx.obj["runtime_config"] = config

        if dbt.tracking.active_user is not None:
            adapter_type = (
                getattr(config.credentials, "type", None)
                if hasattr(config, "credentials")
                else None
            )
            adapter_unique_id = (
                config.credentials.hashed_unique_field()
                if hasattr(config, "credentials")
                else None
            )

            dbt.tracking.track_adapter_info(
                {
                    "adapter_type": adapter_type,
                    "adapter_unique_id": adapter_unique_id,
                }
            )

        return func(*args, **kwargs)

    return update_wrapper(wrapper, func)


def manifest(*args0, write=True, write_perf_info=False):
    """A decorator used by click command functions for generating a manifest
    given a profile, project, and runtime config. This also registers the adapter
    from the runtime config and conditionally writes the manifest to disk.
    """

    def outer_wrapper(func):
        def wrapper(*args, **kwargs):
            ctx = args[0]
            assert isinstance(ctx, Context)

            req_strs = ["profile", "project", "runtime_config"]
            reqs = [ctx.obj.get(dep) for dep in req_strs]

            if None in reqs:
                raise DbtProjectError("profile, project, and runtime_config required for manifest")

            runtime_config = ctx.obj["runtime_config"]
            register_adapter(runtime_config)

            # a manifest has already been set on the context, so don't overwrite it
            if ctx.obj.get("manifest") is None:
                manifest = ManifestLoader.get_full_manifest(
                    runtime_config, write_perf_info=write_perf_info
                )

                ctx.obj["manifest"] = manifest
                if write and ctx.obj["flags"].write_json:
                    write_manifest(manifest, ctx.obj["runtime_config"].project_target_path)

            return func(*args, **kwargs)

        return update_wrapper(wrapper, func)

    # if there are no args, the decorator was used without params @decorator
    # otherwise, the decorator was called with params @decorator(arg)
    if len(args0) == 0:
        return outer_wrapper
    return outer_wrapper(args0[0])
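The trailing args0 check is what makes both spellings seen in main.py work:

    @requires.manifest                   # args0 == (func,): decorates directly
    @requires.manifest(write=False)      # args0 == (): returns outer_wrapper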

core/dbt/cli/resolvers.py
@@ -1,11 +1,31 @@
from pathlib import Path
from dbt.config.project import PartialProject
from dbt.exceptions import DbtProjectError


def default_project_dir():
def default_project_dir() -> Path:
    paths = list(Path.cwd().parents)
    paths.insert(0, Path.cwd())
    return next((x for x in paths if (x / "dbt_project.yml").exists()), Path.cwd())


def default_profiles_dir():
def default_profiles_dir() -> Path:
    return Path.cwd() if (Path.cwd() / "profiles.yml").exists() else Path.home() / ".dbt"


def default_log_path(project_dir: Path, verify_version: bool = False) -> Path:
    """If available, derive a default log path from dbt_project.yml. Otherwise, default to "logs".
    Known limitations:
    1. Using PartialProject here, so no jinja rendering of log-path.
    2. Programmatic invocations of the cli via dbtRunner may pass a Project object directly,
       which is not being taken into consideration here to extract a log-path.
    """
    default_log_path = Path("logs")
    try:
        partial = PartialProject.from_project_root(str(project_dir), verify_version=verify_version)
        partial_log_path = partial.project_dict.get("log-path") or default_log_path
        default_log_path = Path(project_dir) / partial_log_path
    except DbtProjectError:
        pass

    return default_log_path
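default_project_dir now walks upward from the working directory, which is why --project-dir can be omitted from a project subdirectory. A sketch of the behaviour (hypothetical paths):

    # cwd = /repo/models/staging and /repo/dbt_project.yml exists
    #   default_project_dir() -> Path("/repo")
    # no dbt_project.yml anywhere above: falls back to Path.cwd()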

core/dbt/clients/jinja.py
@@ -40,7 +40,7 @@ from dbt.exceptions import (
    UndefinedCompilationError,
    UndefinedMacroError,
)
from dbt import flags
from dbt.flags import get_flags
from dbt.node_types import ModelLanguage


@@ -99,8 +99,9 @@ class MacroFuzzEnvironment(jinja2.sandbox.SandboxedEnvironment):
        If the value is 'write', also write the files to disk.
        WARNING: This can write a ton of data if you aren't careful.
        """
        if filename == "<template>" and flags.MACRO_DEBUGGING:
            write = flags.MACRO_DEBUGGING == "write"
        macro_debugging = get_flags().MACRO_DEBUGGING
        if filename == "<template>" and macro_debugging:
            write = macro_debugging == "write"
            filename = _linecache_inject(source, write)

        return super()._compile(source, filename)  # type: ignore
@@ -482,7 +483,7 @@ def get_environment(
    native: bool = False,
) -> jinja2.Environment:
    args: Dict[str, List[Union[str, Type[jinja2.ext.Extension]]]] = {
        "extensions": ["jinja2.ext.do"]
        "extensions": ["jinja2.ext.do", "jinja2.ext.loopcontrols"]
    }

    if capture_macros:
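Enabling jinja2.ext.loopcontrols means templates rendered through this environment can now use break and continue inside loops. A hypothetical template fragment:

    # "{% for col in cols %}{% if col == 'id' %}{% break %}{% endif %}{{ col }}{% endfor %}"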

core/dbt/clients/system.py
@@ -1,30 +1,31 @@
import errno
import functools
import fnmatch
import functools
import json
import os
import os.path
import re
import shutil
import stat
import subprocess
import sys
import tarfile
import requests
import stat
from typing import Type, NoReturn, List, Optional, Dict, Any, Tuple, Callable, Union
from pathspec import PathSpec  # type: ignore
from pathlib import Path
from typing import Any, Callable, Dict, List, NoReturn, Optional, Tuple, Type, Union

import dbt.exceptions
import requests
from dbt.events.functions import fire_event
from dbt.events.types import (
    SystemErrorRetrievingModTime,
    SystemCouldNotWrite,
    SystemExecutingCmd,
    SystemStdOut,
    SystemStdErr,
    SystemReportReturnCode,
)
import dbt.exceptions
from dbt.exceptions import DbtInternalError
from dbt.utils import _connection_exception_retry as connection_exception_retry
from pathspec import PathSpec  # type: ignore

if sys.platform == "win32":
    from ctypes import WinDLL, c_bool
@@ -75,11 +76,7 @@ def find_matching(
                relative_path = os.path.relpath(absolute_path, absolute_path_to_search)
                relative_path_to_root = os.path.join(relative_path_to_search, relative_path)

                modification_time = 0.0
                try:
                    modification_time = os.path.getmtime(absolute_path)
                except OSError:
                    fire_event(SystemErrorRetrievingModTime(path=absolute_path))
                modification_time = os.path.getmtime(absolute_path)
                if reobj.match(local_file) and (
                    not ignore_spec or not ignore_spec.match_file(relative_path_to_root)
                ):
@@ -106,12 +103,18 @@ def load_file_contents(path: str, strip: bool = True) -> str:
    return to_return


def make_directory(path: str) -> None:
@functools.singledispatch
def make_directory(path=None) -> None:
    """
    Make a directory and any intermediate directories that don't already
    exist. This function handles the case where two threads try to create
    a directory at once.
    """
    raise DbtInternalError(f"Can not create directory from {type(path)} ")


@make_directory.register
def _(path: str) -> None:
    path = convert_path(path)
    if not os.path.exists(path):
        # concurrent writes that try to create the same dir can fail
@@ -125,6 +128,11 @@ def make_directory(path: str) -> None:
            raise e


@make_directory.register
def _(path: Path) -> None:
    path.mkdir(parents=True, exist_ok=True)
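With functools.singledispatch, the overload is chosen by the runtime type of the argument; unsupported types fall through to the base implementation and raise. A sketch of the dispatch (assumed usage):

    from pathlib import Path

    make_directory("target/run")         # str overload: convert_path + os.makedirs path
    make_directory(Path("target/run"))   # Path overload: mkdir(parents=True, exist_ok=True)
    make_directory(42)                   # base case: raises DbtInternalError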


def make_file(path: str, contents: str = "", overwrite: bool = False) -> bool:
    """
    Make a file at `path` assuming that the directory it resides in already
@@ -203,7 +211,7 @@ def _windows_rmdir_readonly(func: Callable[[str], Any], path: str, exc: Tuple[An

def resolve_path_from_base(path_to_resolve: str, base_path: str) -> str:
    """
    If path-to_resolve is a relative path, create an absolute path
    If path_to_resolve is a relative path, create an absolute path
    with base_path as the base.

    If path_to_resolve is an absolute path or a user path (~), just
@@ -441,8 +449,8 @@ def run_cmd(cwd: str, cmd: List[str], env: Optional[Dict[str, Any]] = None) -> T
    except OSError as exc:
        _interpret_oserror(exc, cwd, cmd)

    fire_event(SystemStdOut(bmsg=out))
    fire_event(SystemStdErr(bmsg=err))
    fire_event(SystemStdOut(bmsg=str(out)))
    fire_event(SystemStdErr(bmsg=str(err)))

    if proc.returncode != 0:
        fire_event(SystemReportReturnCode(returncode=proc.returncode))

core/dbt/compilation.py
@@ -1,12 +1,13 @@
import os
from collections import defaultdict
from typing import List, Dict, Any, Tuple, Optional

import argparse
import networkx as nx  # type: ignore
import os
import pickle
import sqlparse

from dbt import flags
from collections import defaultdict
from typing import List, Dict, Any, Tuple, Optional

from dbt.flags import get_flags
from dbt.adapters.factory import get_adapter
from dbt.clients import jinja
from dbt.clients.system import make_directory
@@ -32,6 +33,7 @@ from dbt.events.contextvars import get_node_info
from dbt.node_types import NodeType, ModelLanguage
from dbt.events.format import pluralize
import dbt.tracking
import dbt.task.list as list_task

graph_file_name = "graph.gpickle"

@@ -48,6 +50,7 @@ def print_compile_stats(stats):
        NodeType.Source: "source",
        NodeType.Exposure: "exposure",
        NodeType.Metric: "metric",
        NodeType.Group: "group",
    }

    results = {k: 0 for k in names.keys()}
@@ -85,15 +88,18 @@ def _generate_stats(manifest: Manifest):
        stats[metric.resource_type] += 1
    for macro in manifest.macros.values():
        stats[macro.resource_type] += 1
    for group in manifest.groups.values():
        stats[group.resource_type] += 1
    return stats


def _add_prepended_cte(prepended_ctes, new_cte):
    for cte in prepended_ctes:
        if cte.id == new_cte.id:
        if cte.id == new_cte.id and new_cte.sql:
            cte.sql = new_cte.sql
            return
    prepended_ctes.append(new_cte)
    if new_cte.sql:
        prepended_ctes.append(new_cte)
|
||||
|
||||
|
||||
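
The _add_prepended_cte change above only overwrites or appends a CTE once its sql is actually set. A minimal sketch of the resulting behavior, assuming InjectedCTE is a small dataclass with an id and sql (a hypothetical stand-in for dbt's contract class):

from dataclasses import dataclass
from typing import List, Optional


@dataclass
class InjectedCTE:
    id: str
    sql: Optional[str] = None


def _add_prepended_cte(prepended_ctes: List[InjectedCTE], new_cte: InjectedCTE) -> None:
    for cte in prepended_ctes:
        if cte.id == new_cte.id and new_cte.sql:
            cte.sql = new_cte.sql
            return
    if new_cte.sql:
        prepended_ctes.append(new_cte)


ctes: List[InjectedCTE] = []
_add_prepended_cte(ctes, InjectedCTE("model.proj.a"))               # no sql yet: not appended
_add_prepended_cte(ctes, InjectedCTE("model.proj.a", "select 1"))   # appended once sql exists
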
def _extend_prepended_ctes(prepended_ctes, new_prepended_ctes):
@@ -161,7 +167,7 @@ class Compiler:
        self.config = config

    def initialize(self):
        make_directory(self.config.target_path)
        make_directory(self.config.project_target_path)
        make_directory(self.config.packages_install_path)

    # creates a ModelContext which is converted to
@@ -257,16 +263,18 @@ class Compiler:
        inserting CTEs into the SQL.
        """
        if model.compiled_code is None:
            raise DbtRuntimeError("Cannot inject ctes into an unparsed node", model)
            raise DbtRuntimeError("Cannot inject ctes into an uncompiled node", model)

        # extra_ctes_injected flag says that we've already recursively injected the ctes
        if model.extra_ctes_injected:
            return (model, model.extra_ctes)

        # Just to make it plain that nothing is actually injected for this case
        if not model.extra_ctes:
        if len(model.extra_ctes) == 0:
            # SeedNodes don't have compilation attributes
            if not isinstance(model, SeedNode):
                model.extra_ctes_injected = True
                manifest.update_node(model)
            return (model, model.extra_ctes)
            return (model, [])

        # This stores the ctes which will all be recursively
        # gathered and then "injected" into the model.
@@ -275,7 +283,8 @@ class Compiler:
        # extra_ctes are added to the model by
        # RuntimeRefResolver.create_relation, which adds an
        # extra_cte for every model relation which is an
        # ephemeral model.
        # ephemeral model. InjectedCTEs have a unique_id and sql.
        # extra_ctes start out with sql set to None, and the sql is set in this loop.
        for cte in model.extra_ctes:
            if cte.id not in manifest.nodes:
                raise DbtInternalError(
@@ -288,23 +297,23 @@ class Compiler:
            if not cte_model.is_ephemeral_model:
                raise DbtInternalError(f"{cte.id} is not ephemeral")

            # This model has already been compiled, so it's been
            # through here before
            if getattr(cte_model, "compiled", False):
            # This model has already been compiled and extra_ctes_injected, so it's been
            # through here before. We already checked above for extra_ctes_injected, but
            # checking again because updates maybe have happened in another thread.
            if cte_model.compiled is True and cte_model.extra_ctes_injected is True:
                new_prepended_ctes = cte_model.extra_ctes

            # if the cte_model isn't compiled, i.e. first time here
            else:
                # This is an ephemeral parsed model that we can compile.
                # Compile and update the node
                cte_model = self._compile_node(cte_model, manifest, extra_context)
                # recursively call this method
                # Render the raw_code and set compiled to True
                cte_model = self._compile_code(cte_model, manifest, extra_context)
                # recursively call this method, sets extra_ctes_injected to True
                cte_model, new_prepended_ctes = self._recursively_prepend_ctes(
                    cte_model, manifest, extra_context
                )
                # Save compiled SQL file and sync manifest
                # Write compiled SQL file
                self._write_node(cte_model)
                manifest.sync_update_node(cte_model)

            _extend_prepended_ctes(prepended_ctes, new_prepended_ctes)

@@ -314,24 +323,25 @@ class Compiler:

            _add_prepended_cte(prepended_ctes, InjectedCTE(id=cte.id, sql=sql))

        injected_sql = self._inject_ctes_into_sql(
            model.compiled_code,
            prepended_ctes,
        )
        model._pre_injected_sql = model.compiled_code
        model.compiled_code = injected_sql
        model.extra_ctes_injected = True
        model.extra_ctes = prepended_ctes
        model.validate(model.to_dict(omit_none=True))
        manifest.update_node(model)
        # Check again before updating for multi-threading
        if not model.extra_ctes_injected:
            injected_sql = self._inject_ctes_into_sql(
                model.compiled_code,
                prepended_ctes,
            )
            model.extra_ctes_injected = True
            model._pre_injected_sql = model.compiled_code
            model.compiled_code = injected_sql
            model.extra_ctes = prepended_ctes

        return model, prepended_ctes
        # if model.extra_ctes is not set to prepended ctes, something went wrong
        return model, model.extra_ctes
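
The rewritten tail of _recursively_prepend_ctes above re-checks extra_ctes_injected before mutating the node, because another thread may have finished the same injection while this one was compiling parents; both threads would produce identical results, so the flag re-check alone suffices here. A stripped-down sketch of that check-then-act shape (hypothetical Node stand-in, not dbt's class; add a lock if concurrent writers could disagree):

from dataclasses import dataclass


@dataclass
class Node:
    compiled_code: str
    extra_ctes_injected: bool = False


def inject_once(node: Node, injected_sql: str) -> None:
    # Check again before updating, for multi-threading: a concurrent
    # compiler may already have injected the same CTEs.
    if not node.extra_ctes_injected:
        node.extra_ctes_injected = True
        node.compiled_code = injected_sql
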
    # Sets compiled fields in the ManifestSQLNode passed in,
    # Sets compiled_code and compiled flag in the ManifestSQLNode passed in,
    # creates a "context" dictionary for jinja rendering,
    # and then renders the "compiled_code" using the node, the
    # raw_code and the context.
    def _compile_node(
    def _compile_code(
        self,
        node: ManifestSQLNode,
        manifest: Manifest,
@@ -340,24 +350,7 @@ class Compiler:
        if extra_context is None:
            extra_context = {}

        data = node.to_dict(omit_none=True)
        data.update(
            {
                "compiled": False,
                "compiled_code": None,
                "extra_ctes_injected": False,
                "extra_ctes": [],
            }
        )

        if node.language == ModelLanguage.python:
            # TODO could we also 'minify' this code at all? just aesthetic, not functional

            # quoating seems like something very specific to sql so far
            # for all python implementations we are seeing there's no quating.
            # TODO try to find better way to do this, given that
            original_quoting = self.config.quoting
            self.config.quoting = {key: False for key in original_quoting.keys()}
            context = self._create_node_context(node, manifest, extra_context)

            postfix = jinja.get_rendered(
@@ -367,8 +360,6 @@ class Compiler:
            )
            # we should NOT jinja render the python model's 'raw code'
            node.compiled_code = f"{node.raw_code}\n\n{postfix}"
            # restore quoting settings in the end since context is lazy evaluated
            self.config.quoting = original_quoting

        else:
            context = self._create_node_context(node, manifest, extra_context)
@@ -380,11 +371,24 @@ class Compiler:

        node.compiled = True

        # relation_name is set at parse time, except for tests without store_failures,
        # but cli param can turn on store_failures, so we set here.
        if (
            node.resource_type == NodeType.Test
            and node.relation_name is None
            and node.is_relational
        ):
            adapter = get_adapter(self.config)
            relation_cls = adapter.Relation
            relation_name = str(relation_cls.create_from(self.config, node))
            node.relation_name = relation_name

        return node

    def write_graph_file(self, linker: Linker, manifest: Manifest):
        filename = graph_file_name
        graph_path = os.path.join(self.config.target_path, filename)
        graph_path = os.path.join(self.config.project_target_path, filename)
        flags = get_flags()
        if flags.WRITE_JSON:
            linker.write_graph(graph_path, manifest)

@@ -482,7 +486,13 @@ class Compiler:

        if write:
            self.write_graph_file(linker, manifest)
        print_compile_stats(stats)

        # Do not print these for ListTask's
        if not (
            self.config.args.__class__ == argparse.Namespace
            and self.config.args.cls == list_task.ListTask
        ):
            print_compile_stats(stats)

        return Graph(linker.graph)

@@ -496,9 +506,8 @@ class Compiler:
        fire_event(WritingInjectedSQLForNode(node_info=get_node_info()))

        if node.compiled_code:
            node.compiled_path = node.write_node(
                self.config.target_path, "compiled", node.compiled_code
            )
            node.compiled_path = node.get_target_write_path(self.config.target_path, "compiled")
            node.write_node(self.config.project_root, node.compiled_path, node.compiled_code)
        return node

    def compile_node(
@@ -510,11 +519,17 @@ class Compiler:
    ) -> ManifestSQLNode:
        """This is the main entry point into this code. It's called by
        CompileRunner.compile, GenericRPCRunner.compile, and
        RunTask.get_hook_sql. It calls '_compile_node' to convert
        the node into a compiled node, and then calls the
        RunTask.get_hook_sql. It calls '_compile_code' to render
        the node's raw_code into compiled_code, and then calls the
        recursive method to "prepend" the ctes.
        """
        node = self._compile_node(node, manifest, extra_context)
        # Make sure Lexer for sqlparse 0.4.4 is initialized
        from sqlparse.lexer import Lexer  # type: ignore

        if hasattr(Lexer, "get_default_instance"):
            Lexer.get_default_instance()

        node = self._compile_code(node, manifest, extra_context)

        node, _ = self._recursively_prepend_ctes(node, manifest, extra_context)
        if write:
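
The hasattr guard above eagerly initializes sqlparse's shared Lexer before compilation fans out across threads; per the hunk's comment, Lexer.get_default_instance() exists as of sqlparse 0.4.4, so the guard keeps older versions working. The same warm-up can be reproduced standalone:

from sqlparse.lexer import Lexer  # type: ignore

# Build the singleton up front so the first concurrent parse does not
# race on lazy initialization.
if hasattr(Lexer, "get_default_instance"):
    Lexer.get_default_instance()
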
@@ -1,4 +1,4 @@
# all these are just exports, they need "noqa" so flake8 will not complain.
from .profile import Profile, read_user_config  # noqa
from .project import Project, IsFQNResource  # noqa
from .runtime import RuntimeConfig, UnsetProfileConfig  # noqa
from .project import Project, IsFQNResource, PartialProject  # noqa
from .runtime import RuntimeConfig  # noqa
@@ -4,7 +4,7 @@ import os

from dbt.dataclass_schema import ValidationError

from dbt import flags
from dbt.flags import get_flags
from dbt.clients.system import load_file_contents
from dbt.clients.yaml_helper import load_yaml_text
from dbt.contracts.connection import Credentials, HasCredentials
@@ -32,22 +32,6 @@ dbt encountered an error while trying to read your profiles.yml file.
"""


NO_SUPPLIED_PROFILE_ERROR = """\
dbt cannot run because no profile was specified for this dbt project.
To specify a profile for this project, add a line like the this to
your dbt_project.yml file:

profile: [profile name]

Here, [profile name] should be replaced with a profile name
defined in your profiles.yml file. You can find profiles.yml here:

{profiles_file}/profiles.yml
""".format(
    profiles_file=flags.DEFAULT_PROFILES_DIR
)


def read_profile(profiles_dir: str) -> Dict[str, Any]:
    path = os.path.join(profiles_dir, "profiles.yml")

@@ -197,10 +181,33 @@ class Profile(HasCredentials):
        args_profile_name: Optional[str],
        project_profile_name: Optional[str] = None,
    ) -> str:
        # TODO: Duplicating this method as direct copy of the implementation in dbt.cli.resolvers
        # dbt.cli.resolvers implementation can't be used because it causes a circular dependency.
        # This should be removed and use a safe default access on the Flags module when
        # https://github.com/dbt-labs/dbt-core/issues/6259 is closed.
        def default_profiles_dir():
            from pathlib import Path

            return Path.cwd() if (Path.cwd() / "profiles.yml").exists() else Path.home() / ".dbt"

        profile_name = project_profile_name
        if args_profile_name is not None:
            profile_name = args_profile_name
        if profile_name is None:
            NO_SUPPLIED_PROFILE_ERROR = """\
dbt cannot run because no profile was specified for this dbt project.
To specify a profile for this project, add a line like the this to
your dbt_project.yml file:

profile: [profile name]

Here, [profile name] should be replaced with a profile name
defined in your profiles.yml file. You can find profiles.yml here:

{profiles_file}/profiles.yml
""".format(
                profiles_file=default_profiles_dir()
            )
            raise DbtProjectError(NO_SUPPLIED_PROFILE_ERROR)
        return profile_name

@@ -401,11 +408,13 @@ class Profile(HasCredentials):
        )

    @classmethod
    def render_from_args(
    def render(
        cls,
        args: Any,
        renderer: ProfileRenderer,
        project_profile_name: Optional[str],
        profile_name_override: Optional[str] = None,
        target_override: Optional[str] = None,
        threads_override: Optional[int] = None,
    ) -> "Profile":
        """Given the raw profiles as read from disk and the name of the desired
        profile if specified, return the profile component of the runtime
@@ -421,10 +430,9 @@ class Profile(HasCredentials):
            target could not be found.
        :returns Profile: The new Profile object.
        """
        threads_override = getattr(args, "threads", None)
        target_override = getattr(args, "target", None)
        flags = get_flags()
        raw_profiles = read_profile(flags.PROFILES_DIR)
        profile_name = cls.pick_profile_name(getattr(args, "profile", None), project_profile_name)
        profile_name = cls.pick_profile_name(profile_name_override, project_profile_name)
        return cls.from_raw_profiles(
            raw_profiles=raw_profiles,
            profile_name=profile_name,
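
pick_profile_name, shown in the hunk above, gives an explicitly passed --profile precedence over the profile named in dbt_project.yml and errors only when neither is set. A worked sketch of that precedence (standalone, not the dbt method):

from typing import Optional


def pick_profile_name(
    args_profile_name: Optional[str],
    project_profile_name: Optional[str] = None,
) -> str:
    # project default first, CLI override wins
    profile_name = project_profile_name
    if args_profile_name is not None:
        profile_name = args_profile_name
    if profile_name is None:
        raise RuntimeError("no profile was specified for this dbt project")
    return profile_name


assert pick_profile_name(None, "jaffle_shop") == "jaffle_shop"
assert pick_profile_name("ci_profile", "jaffle_shop") == "ci_profile"
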
@@ -12,10 +12,10 @@ from typing import (
)
from typing_extensions import Protocol, runtime_checkable

import hashlib
import os

from dbt import flags, deprecations
from dbt.flags import get_flags
from dbt import deprecations
from dbt.clients.system import path_exists, resolve_path_from_base, load_file_contents
from dbt.clients.yaml_helper import load_yaml_text
from dbt.contracts.connection import QueryComment
@@ -30,16 +30,16 @@ from dbt.graph import SelectionSpec
from dbt.helper_types import NoValue
from dbt.semver import VersionSpecifier, versions_compatible
from dbt.version import get_installed_version
from dbt.utils import MultiDict
from dbt.utils import MultiDict, md5
from dbt.node_types import NodeType
from dbt.config.selectors import SelectorDict
from dbt.contracts.project import (
    Project as ProjectContract,
    SemverString,
)
from dbt.contracts.project import PackageConfig
from dbt.contracts.project import PackageConfig, ProjectPackageMetadata
from dbt.dataclass_schema import ValidationError
from .renderer import DbtProjectYamlRenderer
from .renderer import DbtProjectYamlRenderer, PackageRenderer
from .selectors import (
    selector_config_from_data,
    selector_data_from_root,
@@ -75,6 +75,11 @@ Validator Error:
{error}
"""

MISSING_DBT_PROJECT_ERROR = """\
No dbt_project.yml found at expected path {path}
Verify that each entry within packages.yml (and their transitive dependencies) contains a file named dbt_project.yml
"""


@runtime_checkable
class IsFQNResource(Protocol):
@@ -132,11 +137,10 @@ def _all_source_paths(
    analysis_paths: List[str],
    macro_paths: List[str],
) -> List[str]:
    # We need to turn a list of lists into just a list, then convert to a set to
    # get only unique elements, then back to a list
    return list(
        set(list(chain(model_paths, seed_paths, snapshot_paths, analysis_paths, macro_paths)))
    )
    paths = chain(model_paths, seed_paths, snapshot_paths, analysis_paths, macro_paths)
    # Strip trailing slashes since the path is the same even though the name is not
    stripped_paths = map(lambda s: s.rstrip("/"), paths)
    return list(set(stripped_paths))
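
The rewritten _all_source_paths above normalizes trailing slashes before deduplicating, so "models" and "models/" collapse to one entry. A standalone sketch of the same normalization:

from itertools import chain
from typing import List


def all_source_paths(*path_lists: List[str]) -> List[str]:
    paths = chain(*path_lists)
    # "models" and "models/" point at the same directory; strip before set()
    return list({p.rstrip("/") for p in paths})


print(sorted(all_source_paths(["models", "models/"], ["seeds"])))
# ['models', 'seeds']
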
T = TypeVar("T")
@@ -156,16 +160,14 @@ def value_or(value: Optional[T], default: T) -> T:
    return value


def _raw_project_from(project_root: str) -> Dict[str, Any]:
def load_raw_project(project_root: str) -> Dict[str, Any]:

    project_root = os.path.normpath(project_root)
    project_yaml_filepath = os.path.join(project_root, "dbt_project.yml")

    # get the project.yml contents
    if not path_exists(project_yaml_filepath):
        raise DbtProjectError(
            "no dbt_project.yml found at expected path {}".format(project_yaml_filepath)
        )
        raise DbtProjectError(MISSING_DBT_PROJECT_ERROR.format(path=project_yaml_filepath))

    project_dict = _load_yaml(project_yaml_filepath)

@@ -289,23 +291,34 @@ class PartialProject(RenderComponents):
            exc.path = os.path.join(self.project_root, "dbt_project.yml")
            raise

    def check_config_path(self, project_dict, deprecated_path, exp_path):
    def render_package_metadata(self, renderer: PackageRenderer) -> ProjectPackageMetadata:
        packages_data = renderer.render_data(self.packages_dict)
        packages_config = package_config_from_data(packages_data)
        if not self.project_name:
            raise DbtProjectError("Package dbt_project.yml must have a name!")
        return ProjectPackageMetadata(self.project_name, packages_config.packages)

    def check_config_path(
        self, project_dict, deprecated_path, expected_path=None, default_value=None
    ):
        if deprecated_path in project_dict:
            if exp_path in project_dict:
            if expected_path in project_dict:
                msg = (
                    "{deprecated_path} and {exp_path} cannot both be defined. The "
                    "`{deprecated_path}` config has been deprecated in favor of `{exp_path}`. "
                    "{deprecated_path} and {expected_path} cannot both be defined. The "
                    "`{deprecated_path}` config has been deprecated in favor of `{expected_path}`. "
                    "Please update your `dbt_project.yml` configuration to reflect this "
                    "change."
                )
                raise DbtProjectError(
                    msg.format(deprecated_path=deprecated_path, exp_path=exp_path)
                    msg.format(deprecated_path=deprecated_path, expected_path=expected_path)
                )
            deprecations.warn(
                f"project-config-{deprecated_path}",
                deprecated_path=deprecated_path,
                exp_path=exp_path,
            )
            # this field is no longer supported, but many projects may specify it with the default value
            # if so, let's only raise this deprecation warning if they set a custom value
            if not default_value or project_dict[deprecated_path] != default_value:
                kwargs = {"deprecated_path": deprecated_path}
                if expected_path:
                    kwargs.update({"exp_path": expected_path})
                deprecations.warn(f"project-config-{deprecated_path}", **kwargs)

    def create_project(self, rendered: RenderComponents) -> "Project":
        unrendered = RenderComponents(
@@ -320,6 +333,8 @@ class PartialProject(RenderComponents):

        self.check_config_path(rendered.project_dict, "source-paths", "model-paths")
        self.check_config_path(rendered.project_dict, "data-paths", "seed-paths")
        self.check_config_path(rendered.project_dict, "log-path", default_value="logs")
        self.check_config_path(rendered.project_dict, "target-path", default_value="target")

        try:
            ProjectContract.validate(rendered.project_dict)
@@ -363,9 +378,13 @@ class PartialProject(RenderComponents):

        docs_paths: List[str] = value_or(cfg.docs_paths, all_source_paths)
        asset_paths: List[str] = value_or(cfg.asset_paths, [])
        target_path: str = flag_or(flags.TARGET_PATH, cfg.target_path, "target")
        flags = get_flags()

        flag_target_path = str(flags.TARGET_PATH) if flags.TARGET_PATH else None
        target_path: str = flag_or(flag_target_path, cfg.target_path, "target")
        log_path: str = str(flags.LOG_PATH)

        clean_targets: List[str] = value_or(cfg.clean_targets, [target_path])
        log_path: str = flag_or(flags.LOG_PATH, cfg.log_path, "logs")
        packages_install_path: str = value_or(cfg.packages_install_path, "dbt_packages")
        # in the default case we'll populate this once we know the adapter type
        # It would be nice to just pass along a Quoting here, but that would
@@ -485,14 +504,7 @@ class PartialProject(RenderComponents):
        cls, project_root: str, *, verify_version: bool = False
    ) -> "PartialProject":
        project_root = os.path.normpath(project_root)
        project_dict = _raw_project_from(project_root)
        config_version = project_dict.get("config-version", 1)
        if config_version != 2:
            raise DbtProjectError(
                f"Invalid config version: {config_version}, expected 2",
                path=os.path.join(project_root, "dbt_project.yml"),
            )

        project_dict = load_raw_project(project_root)
        packages_dict = package_data_from_root(project_root)
        selectors_dict = selector_data_from_root(project_root)
        return cls.from_dicts(
@@ -525,7 +537,7 @@ class VarProvider:
@dataclass
class Project:
    project_name: str
    version: Union[SemverString, float]
    version: Optional[Union[SemverString, float]]
    project_root: str
    profile_name: Optional[str]
    model_paths: List[str]
@@ -659,11 +671,11 @@ class Project:
        *,
        verify_version: bool = False,
    ) -> "Project":
        partial = cls.partial_load(project_root, verify_version=verify_version)
        partial = PartialProject.from_project_root(project_root, verify_version=verify_version)
        return partial.render(renderer)

    def hashed_name(self):
        return hashlib.md5(self.project_name.encode("utf-8")).hexdigest()
        return md5(self.project_name)

    def get_selector(self, name: str) -> Union[SelectionSpec, bool]:
        if name not in self.selectors:
@@ -688,3 +700,8 @@ class Project:
            if dispatch_entry["macro_namespace"] == macro_namespace:
                return dispatch_entry["search_order"]
        return None

    @property
    def project_target_path(self):
        # If target_path is absolute, project_root will not be included
        return os.path.join(self.project_root, self.target_path)
@@ -107,7 +107,7 @@ class DbtProjectYamlRenderer(BaseRenderer):
        if cli_vars is None:
            cli_vars = {}
        if profile:
            self.ctx_obj = TargetContext(profile, cli_vars)
            self.ctx_obj = TargetContext(profile.to_target_dict(), cli_vars)
        else:
            self.ctx_obj = BaseContext(cli_vars)  # type:ignore
        context = self.ctx_obj.to_dict()
@@ -182,7 +182,17 @@ class SecretRenderer(BaseRenderer):
        # First, standard Jinja rendering, with special handling for 'secret' environment variables
        # "{{ env_var('DBT_SECRET_ENV_VAR') }}" -> "$$$DBT_SECRET_START$$$DBT_SECRET_ENV_{VARIABLE_NAME}$$$DBT_SECRET_END$$$"
        # This prevents Jinja manipulation of secrets via macros/filters that might leak partial/modified values in logs
        rendered = super().render_value(value, keypath)

        try:
            rendered = super().render_value(value, keypath)
        except Exception as ex:
            if keypath and "password" in keypath:
                # Passwords sometimes contain jinja-esque characters, but we
                # don't want to render them if they aren't valid jinja.
                rendered = value
            else:
                raise ex

        # Now, detect instances of the placeholder value ($$$DBT_SECRET_START...DBT_SECRET_END$$$)
        # and replace them with the actual secret value
        if SECRET_ENV_PREFIX in str(rendered):
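
The try/except above keeps profiles usable when a password merely looks like Jinja (say, contains "{{") but isn't: for password keypaths the raw value is kept, while real template errors elsewhere still propagate. A standalone sketch of that fallback, using jinja2 directly rather than dbt's renderer stack:

from jinja2 import Environment, TemplateSyntaxError

env = Environment()


def render_value(value: str, keypath: tuple) -> str:
    try:
        return env.from_string(value).render()
    except TemplateSyntaxError:
        if keypath and "password" in keypath:
            # jinja-esque characters in a password: keep it verbatim
            return value
        raise


print(render_value("p@ss{{word", ("outputs", "dev", "password")))  # p@ss{{word
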
@@ -1,7 +1,7 @@
import itertools
import os
from copy import deepcopy
from dataclasses import dataclass, field
from dataclasses import dataclass
from pathlib import Path
from typing import (
    Any,
@@ -13,17 +13,18 @@ from typing import (
    Optional,
    Tuple,
    Type,
    Union,
)

from dbt import flags
from dbt.flags import get_flags
from dbt.adapters.factory import get_include_paths, get_relation_class_by_name
from dbt.config.profile import read_user_config
from dbt.contracts.connection import AdapterRequiredConfig, Credentials
from dbt.config.project import load_raw_project
from dbt.contracts.connection import AdapterRequiredConfig, Credentials, HasCredentials
from dbt.contracts.graph.manifest import ManifestMetadata
from dbt.contracts.project import Configuration, UserConfig
from dbt.contracts.relation import ComponentName
from dbt.dataclass_schema import ValidationError
from dbt.events.functions import warn_or_error
from dbt.events.types import UnusedResourceConfigPath
from dbt.exceptions import (
    ConfigContractBrokenError,
    DbtProjectError,
@@ -31,14 +32,46 @@ from dbt.exceptions import (
    DbtRuntimeError,
    UninstalledPackagesFoundError,
)
from dbt.events.functions import warn_or_error
from dbt.events.types import UnusedResourceConfigPath
from dbt.helper_types import DictDefaultEmptyStr, FQNPath, PathSet

from .profile import Profile
from .project import Project, PartialProject
from .project import Project
from .renderer import DbtProjectYamlRenderer, ProfileRenderer
from .utils import parse_cli_vars


def load_project(
    project_root: str,
    version_check: bool,
    profile: HasCredentials,
    cli_vars: Optional[Dict[str, Any]] = None,
) -> Project:
    # get the project with all of the provided information
    project_renderer = DbtProjectYamlRenderer(profile, cli_vars)
    project = Project.from_project_root(
        project_root, project_renderer, verify_version=version_check
    )

    # Save env_vars encountered in rendering for partial parsing
    project.project_env_vars = project_renderer.ctx_obj.env_vars
    return project


def load_profile(
    project_root: str,
    cli_vars: Dict[str, Any],
    profile_name_override: Optional[str] = None,
    target_override: Optional[str] = None,
    threads_override: Optional[int] = None,
) -> Profile:
    raw_project = load_raw_project(project_root)
    raw_profile_name = raw_project.get("profile")
    profile_renderer = ProfileRenderer(cli_vars)
    profile_name = profile_renderer.render_value(raw_profile_name)
    profile = Profile.render(
        profile_renderer, profile_name, profile_name_override, target_override, threads_override
    )
    # Save env_vars encountered in rendering for partial parsing
    profile.profile_env_vars = profile_renderer.ctx_obj.env_vars
    return profile
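
Together, the two new module-level helpers above replace the old collect_profile/collect_project plumbing: resolve the profile first, then render the project with it. A sketch of the intended call order (paths and values illustrative):

from dbt.config.runtime import load_profile, load_project

project_root = "."  # directory containing dbt_project.yml
cli_vars = {}       # parsed --vars, if any

profile = load_profile(project_root, cli_vars, profile_name_override=None)
project = load_project(project_root, version_check=True, profile=profile, cli_vars=cli_vars)
print(project.project_name)
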
def _project_quoting_dict(proj: Project, profile: Profile) -> Dict[ComponentName, bool]:
@@ -62,6 +95,21 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
    def __post_init__(self):
        self.validate()

    @classmethod
    def get_profile(
        cls,
        project_root: str,
        cli_vars: Dict[str, Any],
        args: Any,
    ) -> Profile:
        return load_profile(
            project_root,
            cli_vars,
            args.profile,
            args.target,
            args.threads,
        )

    # Called by 'new_project' and 'from_args'
    @classmethod
    def from_parts(
@@ -84,7 +132,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
            .replace_dict(_project_quoting_dict(project, profile))
        ).to_dict(omit_none=True)

        cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))
        cli_vars: Dict[str, Any] = getattr(args, "vars", {})

        return cls(
            project_name=project.project_name,
@@ -149,11 +197,10 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):

        # load the new project and its packages. Don't pass cli variables.
        renderer = DbtProjectYamlRenderer(profile)

        project = Project.from_project_root(
            project_root,
            renderer,
            verify_version=bool(flags.VERSION_CHECK),
            verify_version=bool(getattr(self.args, "VERSION_CHECK", True)),
        )

        runtime_config = self.from_parts(
@@ -189,64 +236,19 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
        except ValidationError as e:
            raise ConfigContractBrokenError(e) from e

    @classmethod
    def _get_rendered_profile(
        cls,
        args: Any,
        profile_renderer: ProfileRenderer,
        profile_name: Optional[str],
    ) -> Profile:

        return Profile.render_from_args(args, profile_renderer, profile_name)

    @classmethod
    def collect_parts(cls: Type["RuntimeConfig"], args: Any) -> Tuple[Project, Profile]:

        cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))

        profile = cls.collect_profile(args=args)
        project_renderer = DbtProjectYamlRenderer(profile, cli_vars)
        project = cls.collect_project(args=args, project_renderer=project_renderer)
        assert type(project) is Project
        return (project, profile)

    @classmethod
    def collect_profile(
        cls: Type["RuntimeConfig"], args: Any, profile_name: Optional[str] = None
    ) -> Profile:

        cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))
        profile_renderer = ProfileRenderer(cli_vars)

        # build the profile using the base renderer and the one fact we know
        if profile_name is None:
            # Note: only the named profile section is rendered here. The rest of the
            # profile is ignored.
            partial = cls.collect_project(args)
            assert type(partial) is PartialProject
            profile_name = partial.render_profile_name(profile_renderer)

        profile = cls._get_rendered_profile(args, profile_renderer, profile_name)
        # Save env_vars encountered in rendering for partial parsing
        profile.profile_env_vars = profile_renderer.ctx_obj.env_vars
        return profile

    @classmethod
    def collect_project(
        cls: Type["RuntimeConfig"],
        args: Any,
        project_renderer: Optional[DbtProjectYamlRenderer] = None,
    ) -> Union[Project, PartialProject]:

        # profile_name from the project
        project_root = args.project_dir if args.project_dir else os.getcwd()
        version_check = bool(flags.VERSION_CHECK)
        partial = Project.partial_load(project_root, verify_version=version_check)
        if project_renderer is None:
            return partial
        else:
            project = partial.render(project_renderer)
            project.project_env_vars = project_renderer.ctx_obj.env_vars
            return project
        cli_vars: Dict[str, Any] = getattr(args, "vars", {})
        profile = cls.get_profile(
            project_root,
            cli_vars,
            args,
        )
        flags = get_flags()
        project = load_project(project_root, bool(flags.VERSION_CHECK), profile, cli_vars)
        return project, profile

    # Called in main.py, lib.py, task/base.py
    @classmethod
@@ -411,8 +413,8 @@ class UnsetCredentials(Credentials):
        return ()


# This is used by UnsetProfileConfig, for commands which do
# not require a profile, i.e. dbt deps and clean
# This is used by commands which do not require
# a profile, i.e. dbt deps and clean
class UnsetProfile(Profile):
    def __init__(self):
        self.credentials = UnsetCredentials()
@@ -431,182 +433,12 @@ class UnsetProfile(Profile):
        return Profile.__getattribute__(self, name)


# This class is used by the dbt deps and clean commands, because they don't
# require a functioning profile.
@dataclass
class UnsetProfileConfig(RuntimeConfig):
    """This class acts a lot _like_ a RuntimeConfig, except if your profile is
    missing, any access to profile members results in an exception.
    """

    profile_name: str = field(repr=False)
    target_name: str = field(repr=False)

    def __post_init__(self):
        # instead of futzing with InitVar overrides or rewriting __init__, just
        # `del` the attrs we don't want users touching.
        del self.profile_name
        del self.target_name
        # don't call super().__post_init__(), as that calls validate(), and
        # this object isn't very valid

    def __getattribute__(self, name):
        # Override __getattribute__ to check that the attribute isn't 'banned'.
        if name in {"profile_name", "target_name"}:
            raise DbtRuntimeError(f'Error: disallowed attribute "{name}" - no profile!')

        # avoid every attribute access triggering infinite recursion
        return RuntimeConfig.__getattribute__(self, name)

    def to_target_dict(self):
        # re-override the poisoned profile behavior
        return DictDefaultEmptyStr({})

    def to_project_config(self, with_packages=False):
        """Return a dict representation of the config that could be written to
        disk with `yaml.safe_dump` to get this configuration.

        Overrides dbt.config.Project.to_project_config to omit undefined profile
        attributes.

        :param with_packages bool: If True, include the serialized packages
            file in the root.
        :returns dict: The serialized profile.
        """
        result = deepcopy(
            {
                "name": self.project_name,
                "version": self.version,
                "project-root": self.project_root,
                "profile": "",
                "model-paths": self.model_paths,
                "macro-paths": self.macro_paths,
                "seed-paths": self.seed_paths,
                "test-paths": self.test_paths,
                "analysis-paths": self.analysis_paths,
                "docs-paths": self.docs_paths,
                "asset-paths": self.asset_paths,
                "target-path": self.target_path,
                "snapshot-paths": self.snapshot_paths,
                "clean-targets": self.clean_targets,
                "log-path": self.log_path,
                "quoting": self.quoting,
                "models": self.models,
                "on-run-start": self.on_run_start,
                "on-run-end": self.on_run_end,
                "dispatch": self.dispatch,
                "seeds": self.seeds,
                "snapshots": self.snapshots,
                "sources": self.sources,
                "tests": self.tests,
                "metrics": self.metrics,
                "exposures": self.exposures,
                "vars": self.vars.to_dict(),
                "require-dbt-version": [v.to_version_string() for v in self.dbt_version],
                "config-version": self.config_version,
            }
        )
        if self.query_comment:
            result["query-comment"] = self.query_comment.to_dict(omit_none=True)

        if with_packages:
            result.update(self.packages.to_dict(omit_none=True))

        return result
    @classmethod
    def from_parts(
        cls,
        project: Project,
        profile: Profile,
        args: Any,
        dependencies: Optional[Mapping[str, "RuntimeConfig"]] = None,
    ) -> "RuntimeConfig":
        """Instantiate a RuntimeConfig from its components.

        :param profile: Ignored.
        :param project: A parsed dbt Project.
        :param args: The parsed command-line arguments.
        :returns RuntimeConfig: The new configuration.
        """
        cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))

        return cls(
            project_name=project.project_name,
            version=project.version,
            project_root=project.project_root,
            model_paths=project.model_paths,
            macro_paths=project.macro_paths,
            seed_paths=project.seed_paths,
            test_paths=project.test_paths,
            analysis_paths=project.analysis_paths,
            docs_paths=project.docs_paths,
            asset_paths=project.asset_paths,
            target_path=project.target_path,
            snapshot_paths=project.snapshot_paths,
            clean_targets=project.clean_targets,
            log_path=project.log_path,
            packages_install_path=project.packages_install_path,
            quoting=project.quoting,  # we never use this anyway.
            models=project.models,
            on_run_start=project.on_run_start,
            on_run_end=project.on_run_end,
            dispatch=project.dispatch,
            seeds=project.seeds,
            snapshots=project.snapshots,
            dbt_version=project.dbt_version,
            packages=project.packages,
            manifest_selectors=project.manifest_selectors,
            selectors=project.selectors,
            query_comment=project.query_comment,
            sources=project.sources,
            tests=project.tests,
            metrics=project.metrics,
            exposures=project.exposures,
            vars=project.vars,
            config_version=project.config_version,
            unrendered=project.unrendered,
            project_env_vars=project.project_env_vars,
            profile_env_vars=profile.profile_env_vars,
            profile_name="",
            target_name="",
            user_config=UserConfig(),
            threads=getattr(args, "threads", 1),
            credentials=UnsetCredentials(),
            args=args,
            cli_vars=cli_vars,
            dependencies=dependencies,
        )

    @classmethod
    def _get_rendered_profile(
        cls,
        args: Any,
        profile_renderer: ProfileRenderer,
        profile_name: Optional[str],
    ) -> Profile:

        profile = UnsetProfile()
        # The profile (for warehouse connection) is not needed, but we want
        # to get the UserConfig, which is also in profiles.yml
        user_config = read_user_config(flags.PROFILES_DIR)
        profile.user_config = user_config
        return profile

    @classmethod
    def from_args(cls: Type[RuntimeConfig], args: Any) -> "RuntimeConfig":
        """Given arguments, read in dbt_project.yml from the current directory,
        read in packages.yml if it exists, and use them to find the profile to
        load.

        :param args: The arguments as parsed from the cli.
        :raises DbtProjectError: If the project is invalid or missing.
        :raises DbtProfileError: If the profile is invalid or missing.
        :raises DbtValidationError: If the cli variables are invalid.
        """
        project, profile = cls.collect_parts(args)

        return cls.from_parts(project=project, profile=profile, args=args)
UNUSED_RESOURCE_CONFIGURATION_PATH_MESSAGE = """\
Configuration paths exist in your dbt_project.yml file which do not \
apply to any resources.
There are {} unused configuration paths:
{}
"""


def _is_config_used(path, fqns):
@@ -1,12 +1,7 @@
from argparse import Namespace
from typing import Any, Dict, Optional, Union
from xmlrpc.client import Boolean
from dbt.contracts.project import UserConfig
from typing import Any, Dict


import dbt.flags as flags
from dbt.clients import yaml_helper
from dbt.config import Profile, Project, read_user_config
from dbt.config.renderer import DbtProjectYamlRenderer, ProfileRenderer
from dbt.events.functions import fire_event
from dbt.events.types import InvalidOptionYAML
from dbt.exceptions import DbtValidationError, OptionNotYamlDictError
@@ -24,52 +19,6 @@ def parse_cli_yaml_string(var_string: str, cli_option_name: str) -> Dict[str, An
            return cli_vars
        else:
            raise OptionNotYamlDictError(var_type, cli_option_name)
    except DbtValidationError:
    except (DbtValidationError, OptionNotYamlDictError):
        fire_event(InvalidOptionYAML(option_name=cli_option_name))
        raise


def get_project_config(
    project_path: str,
    profile_name: str,
    args: Namespace = Namespace(),
    cli_vars: Optional[Dict[str, Any]] = None,
    profile: Optional[Profile] = None,
    user_config: Optional[UserConfig] = None,
    return_dict: Boolean = True,
) -> Union[Project, Dict]:
    """Returns a project config (dict or object) from a given project path and profile name.

    Args:
        project_path: Path to project
        profile_name: Name of profile
        args: An argparse.Namespace that represents what would have been passed in on the
            command line (optional)
        cli_vars: A dict of any vars that would have been passed in on the command line (optional)
            (see parse_cli_vars above for formatting details)
        profile: A dbt.config.profile.Profile object (optional)
        user_config: A dbt.contracts.project.UserConfig object (optional)
        return_dict: Return a dict if true, return the full dbt.config.project.Project object if false

    Returns:
        A full project config

    """
    # Generate a profile if not provided
    if profile is None:
        # Generate user_config if not provided
        if user_config is None:
            user_config = read_user_config(flags.PROFILES_DIR)
        # Update flags
        flags.set_from_args(args, user_config)
        if cli_vars is None:
            cli_vars = {}
        profile = Profile.render_from_args(args, ProfileRenderer(cli_vars), profile_name)
    # Generate a project
    project = Project.from_project_root(
        project_path,
        DbtProjectYamlRenderer(profile),
        verify_version=bool(flags.VERSION_CHECK),
    )
    # Return
    return project.to_project_config() if return_dict else project
@@ -2,7 +2,8 @@ import json
import os
from typing import Any, Dict, NoReturn, Optional, Mapping, Iterable, Set, List

from dbt import flags
from dbt.flags import get_flags
import dbt.flags as flags_module
from dbt import tracking
from dbt import utils
from dbt.clients.jinja import get_rendered
@@ -635,7 +636,7 @@ class BaseContext(metaclass=ContextMeta):

        This supports all flags defined in flags submodule (core/dbt/flags.py)
        """
        return flags.get_flag_obj()
        return flags_module.get_flag_obj()

    @contextmember
    @staticmethod
@@ -651,7 +652,7 @@ class BaseContext(metaclass=ContextMeta):
            {% endmacro %}"
        """

        if not flags.NO_PRINT:
        if get_flags().PRINT:
            print(msg)
        return ""


@@ -16,7 +16,8 @@ class ConfiguredContext(TargetContext):
    config: AdapterRequiredConfig

    def __init__(self, config: AdapterRequiredConfig) -> None:
        super().__init__(config, config.cli_vars)
        super().__init__(config.to_target_dict(), config.cli_vars)
        self.config = config

    @contextproperty
    def project_name(self) -> str:
@@ -23,6 +23,8 @@ from dbt.exceptions import (
    PropertyYMLError,
    NotImplementedError,
    RelationWrongTypeError,
    ContractError,
    ColumnTypeMissingError,
)


@@ -65,6 +67,10 @@ def raise_compiler_error(msg, node=None) -> NoReturn:
    raise CompilationError(msg, node)


def raise_contract_error(yaml_columns, sql_columns) -> NoReturn:
    raise ContractError(yaml_columns, sql_columns)


def raise_database_error(msg, node=None) -> NoReturn:
    raise DbtDatabaseError(msg, node)

@@ -97,6 +103,10 @@ def relation_wrong_type(relation, expected_type, model=None) -> NoReturn:
    raise RelationWrongTypeError(relation, expected_type, model)


def column_type_missing(column_names) -> NoReturn:
    raise ColumnTypeMissingError(column_names)


# Update this when a new function should be added to the
# dbt context's `exceptions` key!
CONTEXT_EXPORTS = {
@@ -119,6 +129,8 @@ CONTEXT_EXPORTS = {
        raise_invalid_property_yml_version,
        raise_not_implemented,
        relation_wrong_type,
        raise_contract_error,
        column_type_missing,
    ]
}
@@ -37,8 +37,11 @@ from dbt.contracts.graph.nodes import (
    SourceDefinition,
    Resource,
    ManifestNode,
    RefArgs,
    AccessType,
)
from dbt.contracts.graph.metrics import MetricReference, ResolvedMetricReference
from dbt.contracts.graph.unparsed import NodeVersion
from dbt.events.functions import get_metadata_vars
from dbt.exceptions import (
    CompilationError,
@@ -63,11 +66,12 @@ from dbt.exceptions import (
    DbtRuntimeError,
    TargetNotFoundError,
    DbtValidationError,
    DbtReferenceError,
)
from dbt.config import IsFQNResource
from dbt.node_types import NodeType, ModelLanguage

from dbt.utils import merge, AttrDict, MultiDict, args_to_dict
from dbt.utils import merge, AttrDict, MultiDict, args_to_dict, cast_to_str

from dbt import selected_resources

@@ -212,16 +216,17 @@ class BaseResolver(metaclass=abc.ABCMeta):

class BaseRefResolver(BaseResolver):
    @abc.abstractmethod
    def resolve(self, name: str, package: Optional[str] = None) -> RelationProxy:
    def resolve(
        self, name: str, package: Optional[str] = None, version: Optional[NodeVersion] = None
    ) -> RelationProxy:
        ...

    def _repack_args(self, name: str, package: Optional[str]) -> List[str]:
        if package is None:
            return [name]
        else:
            return [package, name]
    def _repack_args(
        self, name: str, package: Optional[str], version: Optional[NodeVersion]
    ) -> RefArgs:
        return RefArgs(package=package, name=name, version=version)

    def validate_args(self, name: str, package: Optional[str]):
    def validate_args(self, name: str, package: Optional[str], version: Optional[NodeVersion]):
        if not isinstance(name, str):
            raise CompilationError(
                f"The name argument to ref() must be a string, got {type(name)}"
@@ -232,9 +237,15 @@ class BaseRefResolver(BaseResolver):
                f"The package argument to ref() must be a string or None, got {type(package)}"
            )

    def __call__(self, *args: str) -> RelationProxy:
        if version is not None and not isinstance(version, (str, int, float)):
            raise CompilationError(
                f"The version argument to ref() must be a string, int, float, or None - got {type(version)}"
            )

    def __call__(self, *args: str, **kwargs) -> RelationProxy:
        name: str
        package: Optional[str] = None
        version: Optional[NodeVersion] = None

        if len(args) == 1:
            name = args[0]
@@ -242,8 +253,10 @@ class BaseRefResolver(BaseResolver):
            package, name = args
        else:
            raise RefArgsError(node=self.model, args=args)
        self.validate_args(name, package)
        return self.resolve(name, package)

        version = kwargs.get("version") or kwargs.get("v")
        self.validate_args(name, package, version)
        return self.resolve(name, package, version)
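
The __call__ change above lets ref() accept a version keyword (or its shorthand v) alongside the positional name and package, which in a model's SQL corresponds to something like {{ ref('dim_customers', v=2) }}. A minimal sketch of the new calling convention (standalone function, not dbt's resolver class):

def ref(*args, **kwargs):
    package = None
    if len(args) == 1:
        (name,) = args
    elif len(args) == 2:
        package, name = args
    else:
        raise TypeError(f"ref() expected 1 or 2 positional args, got {len(args)}")
    # 'version' and its shorthand 'v' are equivalent
    version = kwargs.get("version") or kwargs.get("v")
    return (package, name, version)


assert ref("dim_customers", v=2) == (None, "dim_customers", 2)
assert ref("jaffle_shop", "dim_customers", version="3") == ("jaffle_shop", "dim_customers", "3")
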
class BaseSourceResolver(BaseResolver):
@@ -448,8 +461,10 @@ class RuntimeDatabaseWrapper(BaseDatabaseWrapper):

# `ref` implementations
class ParseRefResolver(BaseRefResolver):
    def resolve(self, name: str, package: Optional[str] = None) -> RelationProxy:
        self.model.refs.append(self._repack_args(name, package))
    def resolve(
        self, name: str, package: Optional[str] = None, version: Optional[NodeVersion] = None
    ) -> RelationProxy:
        self.model.refs.append(self._repack_args(name, package, version))

        return self.Relation.create_from(self.config, self.model)

@@ -458,10 +473,16 @@ ResolveRef = Union[Disabled, ManifestNode]


class RuntimeRefResolver(BaseRefResolver):
    def resolve(self, target_name: str, target_package: Optional[str] = None) -> RelationProxy:
    def resolve(
        self,
        target_name: str,
        target_package: Optional[str] = None,
        target_version: Optional[NodeVersion] = None,
    ) -> RelationProxy:
        target_model = self.manifest.resolve_ref(
            target_name,
            target_package,
            target_version,
            self.current_project,
            self.model.package_name,
        )
@@ -472,12 +493,27 @@ class RuntimeRefResolver(BaseRefResolver):
                target_name=target_name,
                target_kind="node",
                target_package=target_package,
                target_version=target_version,
                disabled=isinstance(target_model, Disabled),
            )
        self.validate(target_model, target_name, target_package)
        return self.create_relation(target_model, target_name)
        elif (
            target_model.resource_type == NodeType.Model
            and target_model.access == AccessType.Private
            # don't raise this reference error for ad hoc 'preview' queries
            and self.model.resource_type != NodeType.SqlOperation
            and self.model.resource_type != NodeType.RPCCall  # TODO: rm
        ):
            if not self.model.group or self.model.group != target_model.group:
                raise DbtReferenceError(
                    unique_id=self.model.unique_id,
                    ref_unique_id=target_model.unique_id,
                    group=cast_to_str(target_model.group),
                )

    def create_relation(self, target_model: ManifestNode, name: str) -> RelationProxy:
        self.validate(target_model, target_name, target_package, target_version)
        return self.create_relation(target_model)

    def create_relation(self, target_model: ManifestNode) -> RelationProxy:
        if target_model.is_ephemeral_model:
            self.model.set_cte(target_model.unique_id, None)
            return self.Relation.create_ephemeral_from_node(self.config, target_model)
@@ -485,10 +521,14 @@ class RuntimeRefResolver(BaseRefResolver):
        return self.Relation.create_from(self.config, target_model)

    def validate(
        self, resolved: ManifestNode, target_name: str, target_package: Optional[str]
        self,
        resolved: ManifestNode,
        target_name: str,
        target_package: Optional[str],
        target_version: Optional[NodeVersion],
    ) -> None:
        if resolved.unique_id not in self.model.depends_on.nodes:
            args = self._repack_args(target_name, target_package)
            args = self._repack_args(target_name, target_package, target_version)
            raise RefBadContextError(node=self.model, args=args)


@@ -498,16 +538,17 @@ class OperationRefResolver(RuntimeRefResolver):
        resolved: ManifestNode,
        target_name: str,
        target_package: Optional[str],
        target_version: Optional[NodeVersion],
    ) -> None:
        pass

    def create_relation(self, target_model: ManifestNode, name: str) -> RelationProxy:
    def create_relation(self, target_model: ManifestNode) -> RelationProxy:
        if target_model.is_ephemeral_model:
            # In operations, we can't ref() ephemeral nodes, because
            # Macros do not support set_cte
            raise OperationsCannotRefEphemeralNodesError(target_model.name, node=self.model)
        else:
            return super().create_relation(target_model, name)
            return super().create_relation(target_model)


# `source` implementations
@@ -768,7 +809,8 @@ class ProviderContext(ManifestContext):
        # macros/source defs aren't 'writeable'.
        if isinstance(self.model, (Macro, SourceDefinition)):
            raise MacrosSourcesUnWriteableError(node=self.model)
        self.model.build_path = self.model.write_node(self.config.target_path, "run", payload)
        self.model.build_path = self.model.get_target_write_path(self.config.target_path, "run")
        self.model.write_node(self.config.project_root, self.model.build_path, payload)
        return ""

    @contextmember
@@ -1408,10 +1450,18 @@ def generate_runtime_macro_context(


class ExposureRefResolver(BaseResolver):
    def __call__(self, *args) -> str:
        if len(args) not in (1, 2):
    def __call__(self, *args, **kwargs) -> str:
        package = None
        if len(args) == 1:
            name = args[0]
        elif len(args) == 2:
            package, name = args
        else:
            raise RefArgsError(node=self.model, args=args)
        self.model.refs.append(list(args))

        version = kwargs.get("version") or kwargs.get("v")

        self.model.refs.append(RefArgs(package=package, name=name, version=version))
        return ""


@@ -1461,7 +1511,7 @@ def generate_parse_exposure(


class MetricRefResolver(BaseResolver):
    def __call__(self, *args) -> str:
    def __call__(self, *args, **kwargs) -> str:
        package = None
        if len(args) == 1:
            name = args[0]
@@ -1469,11 +1519,14 @@ class MetricRefResolver(BaseResolver):
            package, name = args
        else:
            raise RefArgsError(node=self.model, args=args)
        self.validate_args(name, package)
        self.model.refs.append(list(args))

        version = kwargs.get("version") or kwargs.get("v")
        self.validate_args(name, package, version)

        self.model.refs.append(RefArgs(package=package, name=name, version=version))
        return ""

    def validate_args(self, name, package):
    def validate_args(self, name, package, version):
        if not isinstance(name, str):
            raise ParsingError(
                f"In a metrics section in {self.model.original_file_path} "
@@ -1,15 +1,13 @@
from typing import Any, Dict

from dbt.contracts.connection import HasCredentials

from dbt.context.base import BaseContext, contextproperty


class TargetContext(BaseContext):
    # subclass is ConfiguredContext
    def __init__(self, config: HasCredentials, cli_vars: Dict[str, Any]):
    def __init__(self, target_dict: Dict[str, Any], cli_vars: Dict[str, Any]):
        super().__init__(cli_vars=cli_vars)
        self.config = config
        self.target_dict = target_dict

    @contextproperty
    def target(self) -> Dict[str, Any]:
@@ -73,9 +71,4 @@ class TargetContext(BaseContext):
        |----------|-----------|------------------------------------------|

        """
        return self.config.to_target_dict()


def generate_target_context(config: HasCredentials, cli_vars: Dict[str, Any]) -> Dict[str, Any]:
    ctx = TargetContext(config, cli_vars)
    return ctx.to_dict()
        return self.target_dict
@@ -1,6 +1,5 @@
import abc
import itertools
import hashlib
from dataclasses import dataclass, field
from typing import (
    Any,
@@ -13,7 +12,7 @@ from typing import (
    Callable,
)
from dbt.exceptions import DbtInternalError
from dbt.utils import translate_aliases
from dbt.utils import translate_aliases, md5
from dbt.events.functions import fire_event
from dbt.events.types import NewConnectionOpening
from dbt.events.contextvars import get_node_info
@@ -142,7 +141,7 @@ class Credentials(ExtensibleDbtClassMixin, Replaceable, metaclass=abc.ABCMeta):
        raise NotImplementedError("unique_field not implemented for base credentials class")

    def hashed_unique_field(self) -> str:
        return hashlib.md5(self.unique_field.encode("utf-8")).hexdigest()
        return md5(self.unique_field)

    def connection_info(self, *, with_aliases: bool = False) -> Iterable[Tuple[str, Any]]:
        """Return an ordered iterator of key/value pairs for pretty-printing."""
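
Several call sites in these hunks swap inline hashlib.md5(...).hexdigest() for a shared dbt.utils.md5 helper. A plausible shape for that helper (a sketch; the actual dbt.utils implementation may differ in details such as charset handling):

import hashlib


def md5(string: str, charset: str = "utf-8") -> str:
    # One place to encode and hash, instead of repeating it at every call site.
    return hashlib.md5(string.encode(charset)).hexdigest()


assert md5("jaffle_shop") == hashlib.md5(b"jaffle_shop").hexdigest()
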
@@ -61,8 +61,6 @@ class FilePath(dbtClassMixin):

    @property
    def original_file_path(self) -> str:
-        # this is mostly used for reporting errors. It doesn't show the project
-        # name, should it?
        return os.path.join(self.searched_path, self.relative_path)

    def seed_too_large(self) -> bool:
@@ -227,6 +225,7 @@ class SchemaSourceFile(BaseSourceFile):
    sources: List[str] = field(default_factory=list)
    exposures: List[str] = field(default_factory=list)
    metrics: List[str] = field(default_factory=list)
+    groups: List[str] = field(default_factory=list)
    # node patches contain models, seeds, snapshots, analyses
    ndp: List[str] = field(default_factory=list)
    # any macro patches in this file by macro unique_id.
@@ -29,13 +29,15 @@ from dbt.contracts.graph.nodes import (
    GenericTestNode,
    Exposure,
    Metric,
+    Group,
    UnpatchedSourceDefinition,
    ManifestNode,
    GraphMemberNode,
    ResultNode,
    BaseNode,
)
-from dbt.contracts.graph.unparsed import SourcePatch
+from dbt.contracts.graph.unparsed import SourcePatch, NodeVersion, UnparsedVersion
+from dbt.contracts.graph.manifest_upgrade import upgrade_manifest_json
from dbt.contracts.files import SourceFile, SchemaSourceFile, FileHash, AnySourceFile
from dbt.contracts.util import BaseArtifactMetadata, SourceKey, ArtifactMixin, schema_version
from dbt.dataclass_schema import dbtClassMixin
@@ -47,9 +49,10 @@ from dbt.exceptions import (
)
from dbt.helper_types import PathSet
from dbt.events.functions import fire_event
-from dbt.events.types import MergedFromState
+from dbt.events.types import MergedFromState, UnpinnedRefNewVersionAvailable
+from dbt.events.contextvars import get_node_info
from dbt.node_types import NodeType
-from dbt import flags
+from dbt.flags import get_flags, MP_CONTEXT
from dbt import tracking
import dbt.utils

@@ -144,6 +147,7 @@ class SourceLookup(dbtClassMixin):
class RefableLookup(dbtClassMixin):
    # model, seed, snapshot
    _lookup_types: ClassVar[set] = set(NodeType.refable())
+    _versioned_types: ClassVar[set] = set(NodeType.versioned())

    # refables are actually unique, so the Dict[PackageName, UniqueID] will
    # only ever have exactly one value, but doing 3 dict lookups instead of 1
@@ -152,20 +156,61 @@ class RefableLookup(dbtClassMixin):
        self.storage: Dict[str, Dict[PackageName, UniqueID]] = {}
        self.populate(manifest)

-    def get_unique_id(self, key, package: Optional[PackageName]):
+    def get_unique_id(self, key, package: Optional[PackageName], version: Optional[NodeVersion]):
+        if version:
+            key = f"{key}.v{version}"
        return find_unique_id_for_package(self.storage, key, package)

-    def find(self, key, package: Optional[PackageName], manifest: "Manifest"):
-        unique_id = self.get_unique_id(key, package)
+    def find(
+        self,
+        key,
+        package: Optional[PackageName],
+        version: Optional[NodeVersion],
+        manifest: "Manifest",
+    ):
+        unique_id = self.get_unique_id(key, package, version)
        if unique_id is not None:
-            return self.perform_lookup(unique_id, manifest)
+            node = self.perform_lookup(unique_id, manifest)
+            # If this is an unpinned ref (no 'version' arg was passed),
+            # AND this is a versioned node,
+            # AND this ref is being resolved at runtime -- get_node_info != {}
+            if version is None and node.is_versioned and get_node_info():
+                # Check to see if newer versions are available, and log an "FYI" if so
+                max_version: UnparsedVersion = max(
+                    [
+                        UnparsedVersion(v.version)
+                        for v in manifest.nodes.values()
+                        if v.name == node.name and v.version is not None
+                    ]
+                )
+                assert node.latest_version is not None  # for mypy, whenever i may find it
+                if max_version > UnparsedVersion(node.latest_version):
+                    fire_event(
+                        UnpinnedRefNewVersionAvailable(
+                            node_info=get_node_info(),
+                            ref_node_name=node.name,
+                            ref_node_package=node.package_name,
+                            ref_node_version=str(node.version),
+                            ref_max_version=str(max_version.v),
+                        )
+                    )
+
+            return node
        return None

    def add_node(self, node: ManifestNode):
        if node.resource_type in self._lookup_types:
            if node.name not in self.storage:
                self.storage[node.name] = {}
-            self.storage[node.name][node.package_name] = node.unique_id
+
+            if node.is_versioned:
+                if node.search_name not in self.storage:
+                    self.storage[node.search_name] = {}
+                self.storage[node.search_name][node.package_name] = node.unique_id
+                if node.is_latest_version:  # type: ignore
+                    self.storage[node.name][node.package_name] = node.unique_id
+            else:
+                self.storage[node.name][node.package_name] = node.unique_id

    def populate(self, manifest):
        for node in manifest.nodes.values():
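The add_node change above gives a versioned node two entries in the lookup: one under its versioned search name (name.vN), and, for the latest version only, one under the bare name, so that unpinned refs resolve to the latest version. A self-contained sketch of that storage scheme (plain dicts, packages omitted):

from typing import Dict, Optional

storage: Dict[str, str] = {}  # search_name -> unique_id

def add_versioned_node(name: str, version: Optional[str], is_latest: bool, unique_id: str):
    if version is None:
        storage[name] = unique_id  # unversioned node: bare name only
    else:
        storage[f"{name}.v{version}"] = unique_id  # always keyed by versioned name
        if is_latest:
            storage[name] = unique_id  # latest version also claims the bare name

add_versioned_node("dim_customers", "1", False, "model.jaffle_shop.dim_customers.v1")
add_versioned_node("dim_customers", "2", True, "model.jaffle_shop.dim_customers.v2")

# A pinned ref looks up "name.vN"; an unpinned ref falls back to the bare name.
assert storage["dim_customers.v1"].endswith(".v1")
assert storage["dim_customers"].endswith(".v2")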
@@ -231,7 +276,12 @@ class DisabledLookup(dbtClassMixin):

    # This should return a list of disabled nodes. It's different from
    # the other Lookup functions in that it returns full nodes, not just unique_ids
-    def find(self, search_name, package: Optional[PackageName]):
+    def find(
+        self, search_name, package: Optional[PackageName], version: Optional[NodeVersion] = None
+    ):
+        if version:
+            search_name = f"{search_name}.v{version}"
+
        if search_name not in self.storage:
            return None

@@ -250,6 +300,7 @@ class DisabledLookup(dbtClassMixin):

class AnalysisLookup(RefableLookup):
    _lookup_types: ClassVar[set] = set([NodeType.Analysis])
+    _versioned_types: ClassVar[set] = set()


def _search_packages(
@@ -303,7 +354,7 @@ class ManifestMetadata(BaseArtifactMetadata):
            self.user_id = tracking.active_user.id

        if self.send_anonymous_usage_stats is None:
-            self.send_anonymous_usage_stats = flags.SEND_ANONYMOUS_USAGE_STATS
+            self.send_anonymous_usage_stats = get_flags().SEND_ANONYMOUS_USAGE_STATS

    @classmethod
    def default(cls):
@@ -599,6 +650,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
    docs: MutableMapping[str, Documentation] = field(default_factory=dict)
    exposures: MutableMapping[str, Exposure] = field(default_factory=dict)
    metrics: MutableMapping[str, Metric] = field(default_factory=dict)
+    groups: MutableMapping[str, Group] = field(default_factory=dict)
    selectors: MutableMapping[str, Any] = field(default_factory=dict)
    files: MutableMapping[str, AnySourceFile] = field(default_factory=dict)
    metadata: ManifestMetadata = field(default_factory=ManifestMetadata)
@@ -631,7 +683,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
        metadata={"serialize": lambda x: None, "deserialize": lambda x: None},
    )
    _lock: Lock = field(
-        default_factory=flags.MP_CONTEXT.Lock,
+        default_factory=MP_CONTEXT.Lock,
        metadata={"serialize": lambda x: None, "deserialize": lambda x: None},
    )

@@ -643,27 +695,9 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):

    @classmethod
    def __post_deserialize__(cls, obj):
-        obj._lock = flags.MP_CONTEXT.Lock()
+        obj._lock = MP_CONTEXT.Lock()
        return obj

-    def sync_update_node(self, new_node: ManifestNode) -> ManifestNode:
-        """update the node with a lock. The only time we should want to lock is
-        when compiling an ephemeral ancestor of a node at runtime, because
-        multiple threads could be just-in-time compiling the same ephemeral
-        dependency, and we want them to have a consistent view of the manifest.
-
-        If the existing node is not compiled, update it with the new node and
-        return that. If the existing node is compiled, do not update the
-        manifest and return the existing node.
-        """
-        with self._lock:
-            existing = self.nodes[new_node.unique_id]
-            if getattr(existing, "compiled", False):
-                # already compiled
-                return existing
-            _update_into(self.nodes, new_node)
-            return new_node
-
    def update_exposure(self, new_exposure: Exposure):
        _update_into(self.exposures, new_exposure)

@@ -684,6 +718,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
        """
        self.flat_graph = {
            "exposures": {k: v.to_dict(omit_none=False) for k, v in self.exposures.items()},
+            "groups": {k: v.to_dict(omit_none=False) for k, v in self.groups.items()},
            "metrics": {k: v.to_dict(omit_none=False) for k, v in self.metrics.items()},
            "nodes": {k: v.to_dict(omit_none=False) for k, v in self.nodes.items()},
            "sources": {k: v.to_dict(omit_none=False) for k, v in self.sources.items()},
@@ -775,6 +810,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
            docs={k: _deepcopy(v) for k, v in self.docs.items()},
            exposures={k: _deepcopy(v) for k, v in self.exposures.items()},
            metrics={k: _deepcopy(v) for k, v in self.metrics.items()},
+            groups={k: _deepcopy(v) for k, v in self.groups.items()},
            selectors={k: _deepcopy(v) for k, v in self.selectors.items()},
            metadata=self.metadata,
            disabled={k: _deepcopy(v) for k, v in self.disabled.items()},
@@ -807,8 +843,22 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
        forward_edges = build_macro_edges(edge_members)
        return forward_edges

+    def build_group_map(self):
+        groupable_nodes = list(
+            chain(
+                self.nodes.values(),
+                self.metrics.values(),
+            )
+        )
+        group_map = {group.name: [] for group in self.groups.values()}
+        for node in groupable_nodes:
+            if node.group is not None:
+                group_map[node.group].append(node.unique_id)
+        self.group_map = group_map
+
    def writable_manifest(self):
        self.build_parent_and_child_maps()
+        self.build_group_map()
        return WritableManifest(
            nodes=self.nodes,
            sources=self.sources,
@@ -816,11 +866,13 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
            docs=self.docs,
            exposures=self.exposures,
            metrics=self.metrics,
+            groups=self.groups,
            selectors=self.selectors,
            metadata=self.metadata,
            disabled=self.disabled,
            child_map=self.child_map,
            parent_map=self.parent_map,
+            group_map=self.group_map,
        )

    def write(self, path):
@@ -897,6 +949,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
        self,
        target_model_name: str,
        target_model_package: Optional[str],
+        target_model_version: Optional[NodeVersion],
        current_project: str,
        node_package: str,
    ) -> MaybeNonSource:
@@ -906,14 +959,14 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):

        candidates = _search_packages(current_project, node_package, target_model_package)
        for pkg in candidates:
-            node = self.ref_lookup.find(target_model_name, pkg, self)
+            node = self.ref_lookup.find(target_model_name, pkg, target_model_version, self)

            if node is not None and node.config.enabled:
                return node

            # it's possible that the node is disabled
            if disabled is None:
-                disabled = self.disabled_lookup.find(target_model_name, pkg)
+                disabled = self.disabled_lookup.find(target_model_name, pkg, target_model_version)

        if disabled:
            return Disabled(disabled[0])
@@ -1070,6 +1123,8 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
            source_file.metrics.append(node.unique_id)
            if isinstance(node, Exposure):
                source_file.exposures.append(node.unique_id)
+            if isinstance(node, Group):
+                source_file.groups.append(node.unique_id)
        else:
            source_file.nodes.append(node.unique_id)

@@ -1083,6 +1138,11 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
        self.metrics[metric.unique_id] = metric
        source_file.metrics.append(metric.unique_id)

+    def add_group(self, source_file: SchemaSourceFile, group: Group):
+        _check_duplicates(group, self.groups)
+        self.groups[group.unique_id] = group
+        source_file.groups.append(group.unique_id)
+
    def add_disabled_nofile(self, node: GraphMemberNode):
        # There can be multiple disabled nodes for the same unique_id
        if node.unique_id in self.disabled:
@@ -1125,6 +1185,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
            self.docs,
            self.exposures,
            self.metrics,
+            self.groups,
            self.selectors,
            self.files,
            self.metadata,
@@ -1156,7 +1217,7 @@ AnyManifest = Union[Manifest, MacroManifest]


@dataclass
-@schema_version("manifest", 8)
+@schema_version("manifest", 9)
class WritableManifest(ArtifactMixin):
    nodes: Mapping[UniqueID, ManifestNode] = field(
        metadata=dict(description=("The nodes defined in the dbt project and its dependencies"))
@@ -1178,10 +1239,13 @@ class WritableManifest(ArtifactMixin):
    metrics: Mapping[UniqueID, Metric] = field(
        metadata=dict(description=("The metrics defined in the dbt project and its dependencies"))
    )
+    groups: Mapping[UniqueID, Group] = field(
+        metadata=dict(description=("The groups defined in the dbt project"))
+    )
    selectors: Mapping[UniqueID, Any] = field(
        metadata=dict(description=("The selectors defined in selectors.yml"))
    )
-    disabled: Optional[Mapping[UniqueID, List[ResultNode]]] = field(
+    disabled: Optional[Mapping[UniqueID, List[GraphMemberNode]]] = field(
        metadata=dict(description="A mapping of the disabled nodes in the target")
    )
    parent_map: Optional[NodeEdgeMap] = field(
@@ -1194,6 +1258,11 @@ class WritableManifest(ArtifactMixin):
            description="A mapping from parent nodes to their dependents",
        )
    )
+    group_map: Optional[NodeEdgeMap] = field(
+        metadata=dict(
+            description="A mapping from group names to their nodes",
+        )
+    )
    metadata: ManifestMetadata = field(
        metadata=dict(
            description="Metadata about the manifest",
@@ -1202,7 +1271,21 @@ class WritableManifest(ArtifactMixin):

    @classmethod
    def compatible_previous_versions(self):
-        return [("manifest", 4), ("manifest", 5), ("manifest", 6), ("manifest", 7)]
+        return [
+            ("manifest", 4),
+            ("manifest", 5),
+            ("manifest", 6),
+            ("manifest", 7),
+            ("manifest", 8),
+        ]
+
+    @classmethod
+    def upgrade_schema_version(cls, data):
+        """This overrides the "upgrade_schema_version" call in VersionedSchema (via
+        ArtifactMixin) to modify the dictionary passed in from earlier versions of the manifest."""
+        if get_manifest_schema_version(data) <= 8:
+            data = upgrade_manifest_json(data)
+        return cls.from_dict(data)

    def __post_serialize__(self, dct):
        for unique_id, node in dct["nodes"].items():
@@ -1211,6 +1294,13 @@ class WritableManifest(ArtifactMixin):
        return dct


+def get_manifest_schema_version(dct: dict) -> int:
+    schema_version = dct.get("metadata", {}).get("dbt_schema_version", None)
+    if not schema_version:
+        raise ValueError("Manifest doesn't have schema version")
+    return int(schema_version.split(".")[-2][-1])
+
+
def _check_duplicates(value: BaseNode, src: Mapping[str, BaseNode]):
    if value.unique_id in src:
        raise DuplicateResourceNameError(value, src[value.unique_id])
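One detail of the new get_manifest_schema_version worth tracing: dbt_schema_version is a URL, and the version digit is recovered by string slicing, which quietly assumes a single-digit version number. A worked trace of the indexing:

url = "https://schemas.getdbt.com/dbt/manifest/v8.json"
chunks = url.split(".")          # ["https://schemas", "getdbt", "com/dbt/manifest/v8", "json"]
assert chunks[-2] == "com/dbt/manifest/v8"
assert chunks[-2][-1] == "8"     # last character of the second-to-last chunk
assert int(chunks[-2][-1]) == 8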
core/dbt/contracts/graph/manifest_upgrade.py (new file, 128 additions)
@@ -0,0 +1,128 @@
+from dbt import deprecations
+from dbt.dataclass_schema import ValidationError
+
+
+# we renamed these properties in v1.3
+# this method allows us to be nice to the early adopters
+def rename_metric_attr(data: dict, raise_deprecation_warning: bool = False) -> dict:
+    metric_name = data["name"]
+    if raise_deprecation_warning and (
+        "sql" in data.keys()
+        or "type" in data.keys()
+        or data.get("calculation_method") == "expression"
+    ):
+        deprecations.warn("metric-attr-renamed", metric_name=metric_name)
+    duplicated_attribute_msg = """\n
+The metric '{}' contains both the deprecated metric property '{}'
+and the up-to-date metric property '{}'. Please remove the deprecated property.
+"""
+    if "sql" in data.keys():
+        if "expression" in data.keys():
+            raise ValidationError(
+                duplicated_attribute_msg.format(metric_name, "sql", "expression")
+            )
+        else:
+            data["expression"] = data.pop("sql")
+    if "type" in data.keys():
+        if "calculation_method" in data.keys():
+            raise ValidationError(
+                duplicated_attribute_msg.format(metric_name, "type", "calculation_method")
+            )
+        else:
+            calculation_method = data.pop("type")
+            data["calculation_method"] = calculation_method
+    # we also changed "type: expression" -> "calculation_method: derived"
+    if data.get("calculation_method") == "expression":
+        data["calculation_method"] = "derived"
+    return data
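For a concrete sense of what rename_metric_attr does, a small worked example (the metric dict is made up; the function above is assumed in scope): sql becomes expression, type becomes calculation_method, and the old "expression" calculation method is translated to "derived".

old_metric = {
    "name": "revenue",
    "sql": "amount",       # renamed to "expression" in v1.3
    "type": "expression",  # renamed to "calculation_method"; value becomes "derived"
}

upgraded = rename_metric_attr(old_metric)
assert upgraded == {
    "name": "revenue",
    "expression": "amount",
    "calculation_method": "derived",
}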
+def rename_sql_attr(node_content: dict) -> dict:
+    if "raw_sql" in node_content:
+        node_content["raw_code"] = node_content.pop("raw_sql")
+    if "compiled_sql" in node_content:
+        node_content["compiled_code"] = node_content.pop("compiled_sql")
+    node_content["language"] = "sql"
+    return node_content
+
+
+def upgrade_ref_content(node_content: dict) -> dict:
+    # In v1.5 we switched Node.refs from List[List[str]] to List[Dict[str, Union[NodeVersion, str]]]
+    # Previous versions did not have a version keyword argument for ref
+    if "refs" in node_content:
+        upgraded_refs = []
+        for ref in node_content["refs"]:
+            if isinstance(ref, list):
+                if len(ref) == 1:
+                    upgraded_refs.append({"package": None, "name": ref[0], "version": None})
+                else:
+                    upgraded_refs.append({"package": ref[0], "name": ref[1], "version": None})
+        node_content["refs"] = upgraded_refs
+    return node_content
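Likewise for upgrade_ref_content, a before/after sketch (made-up node dict, function above assumed in scope): list-form refs from older manifests become dicts with an explicit, always-None version.

node = {"refs": [["dim_customers"], ["jaffle_shop", "dim_customers"]]}
upgraded = upgrade_ref_content(node)
assert upgraded["refs"] == [
    {"package": None, "name": "dim_customers", "version": None},
    {"package": "jaffle_shop", "name": "dim_customers", "version": None},
]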
+def upgrade_node_content(node_content):
+    rename_sql_attr(node_content)
+    upgrade_ref_content(node_content)
+    if node_content["resource_type"] != "seed" and "root_path" in node_content:
+        del node_content["root_path"]
+
+
+def upgrade_seed_content(node_content):
+    # Remove compilation related attributes
+    for attr_name in (
+        "language",
+        "refs",
+        "sources",
+        "metrics",
+        "compiled_path",
+        "compiled",
+        "compiled_code",
+        "extra_ctes_injected",
+        "extra_ctes",
+        "relation_name",
+    ):
+        if attr_name in node_content:
+            del node_content[attr_name]
+    # In v1.4, we switched SeedNode.depends_on from DependsOn to MacroDependsOn
+    node_content.get("depends_on", {}).pop("nodes", None)
+
+
+def upgrade_manifest_json(manifest: dict) -> dict:
+    for node_content in manifest.get("nodes", {}).values():
+        upgrade_node_content(node_content)
+        if node_content["resource_type"] == "seed":
+            upgrade_seed_content(node_content)
+    for disabled in manifest.get("disabled", {}).values():
+        # There can be multiple disabled nodes for the same unique_id
+        # so make sure all the nodes get the attr renamed
+        for node_content in disabled:
+            upgrade_node_content(node_content)
+            if node_content["resource_type"] == "seed":
+                upgrade_seed_content(node_content)
+    # add group key
+    if "groups" not in manifest:
+        manifest["groups"] = {}
+    if "group_map" not in manifest:
+        manifest["group_map"] = {}
+    for metric_content in manifest.get("metrics", {}).values():
+        # handle attr renames + value translation ("expression" -> "derived")
+        metric_content = rename_metric_attr(metric_content)
+        metric_content = upgrade_ref_content(metric_content)
+        if "root_path" in metric_content:
+            del metric_content["root_path"]
+    for exposure_content in manifest.get("exposures", {}).values():
+        exposure_content = upgrade_ref_content(exposure_content)
+        if "root_path" in exposure_content:
+            del exposure_content["root_path"]
+    for source_content in manifest.get("sources", {}).values():
+        if "root_path" in source_content:
+            del source_content["root_path"]
+    for macro_content in manifest.get("macros", {}).values():
+        if "root_path" in macro_content:
+            del macro_content["root_path"]
+    for doc_content in manifest.get("docs", {}).values():
+        if "root_path" in doc_content:
+            del doc_content["root_path"]
+        doc_content["resource_type"] = "doc"
+    return manifest
@@ -189,6 +189,11 @@ class Severity(str):
register_pattern(Severity, insensitive_patterns("warn", "error"))


+@dataclass
+class ContractConfig(dbtClassMixin, Replaceable):
+    enforced: bool = False
+
+
@dataclass
class Hook(dbtClassMixin, Replaceable):
    sql: str
@@ -286,7 +291,7 @@ class BaseConfig(AdditionalPropertiesAllowed, Replaceable):
    # 'meta' moved here from node
    mergebehavior = {
        "append": ["pre-hook", "pre_hook", "post-hook", "post_hook", "tags"],
-        "update": ["quoting", "column_types", "meta", "docs"],
+        "update": ["quoting", "column_types", "meta", "docs", "contract"],
        "dict_key_append": ["grants"],
    }

@@ -366,6 +371,7 @@ class BaseConfig(AdditionalPropertiesAllowed, Replaceable):
@dataclass
class MetricConfig(BaseConfig):
    enabled: bool = True
+    group: Optional[str] = None


@dataclass
@@ -403,6 +409,10 @@ class NodeAndTestConfig(BaseConfig):
        default_factory=dict,
        metadata=MergeBehavior.Update.meta(),
    )
+    group: Optional[str] = field(
+        default=None,
+        metadata=CompareBehavior.Exclude.meta(),
+    )


@dataclass
@@ -446,9 +456,13 @@ class NodeConfig(NodeAndTestConfig):
        default_factory=Docs,
        metadata=MergeBehavior.Update.meta(),
    )
+    contract: ContractConfig = field(
+        default_factory=ContractConfig,
+        metadata=MergeBehavior.Update.meta(),
+    )

-    # we validate that node_color has a suitable value to prevent dbt-docs from crashing
    def __post_init__(self):
+        # we validate that node_color has a suitable value to prevent dbt-docs from crashing
        if self.docs.node_color:
            node_color = self.docs.node_color
            if not validate_color(node_color):
@@ -457,6 +471,17 @@ class NodeConfig(NodeAndTestConfig):
                    "It is neither a valid HTML color name nor a valid HEX code."
                )

+        if (
+            self.contract.enforced
+            and self.materialized == "incremental"
+            and self.on_schema_change not in ("append_new_columns", "fail")
+        ):
+            raise ValidationError(
+                f"Invalid value for on_schema_change: {self.on_schema_change}. Models "
+                "materialized as incremental with contracts enabled must set "
+                "on_schema_change to 'append_new_columns' or 'fail'"
+            )
+
    @classmethod
    def __pre_deserialize__(cls, data):
        data = super().__pre_deserialize__(data)
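The new __post_init__ check above encodes a rule worth spelling out: an incremental model with an enforced contract must pin on_schema_change to append_new_columns or fail, because the other settings could let the built table drift from the contract. A standalone sketch of just that rule (hypothetical helper, not dbt's API):

def check_contracted_incremental(materialized: str, enforced: bool, on_schema_change: str) -> None:
    # Mirrors the NodeConfig.__post_init__ validation added above.
    if (
        enforced
        and materialized == "incremental"
        and on_schema_change not in ("append_new_columns", "fail")
    ):
        raise ValueError(
            f"Invalid value for on_schema_change: {on_schema_change}. Models "
            "materialized as incremental with contracts enabled must set "
            "on_schema_change to 'append_new_columns' or 'fail'"
        )

check_contracted_incremental("incremental", True, "fail")  # ok
check_contracted_incremental("table", True, "ignore")      # ok: not incremental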
@@ -1,6 +1,9 @@
import os
import time
from dataclasses import dataclass, field
+from enum import Enum
+import hashlib
+
from mashumaro.types import SerializableType
from typing import (
    Optional,
@@ -18,36 +21,36 @@ from dbt.dataclass_schema import dbtClassMixin, ExtensibleDbtClassMixin
from dbt.clients.system import write_file
from dbt.contracts.files import FileHash
from dbt.contracts.graph.unparsed import (
-    Quoting,
    Docs,
-    FreshnessThreshold,
-    ExposureType,
    ExternalTable,
+    FreshnessThreshold,
    HasYamlMetadata,
    MacroArgument,
-    UnparsedSourceDefinition,
-    UnparsedSourceTableDefinition,
-    UnparsedColumn,
-    TestDef,
-    ExposureOwner,
+    ExposureType,
    MaturityType,
    MetricFilter,
    MetricTime,
+    Owner,
+    Quoting,
+    TestDef,
+    NodeVersion,
+    UnparsedSourceDefinition,
+    UnparsedSourceTableDefinition,
+    UnparsedColumn,
)
from dbt.contracts.util import Replaceable, AdditionalPropertiesMixin
-from dbt.events.proto_types import NodeInfo
from dbt.events.functions import warn_or_error
+from dbt.exceptions import ParsingError, InvalidAccessTypeError, ContractBreakingChangeError
from dbt.events.types import (
    SeedIncreased,
    SeedExceedsLimitSamePath,
    SeedExceedsLimitAndPathChanged,
    SeedExceedsLimitChecksumChanged,
+    ValidationWarning,
)
-from dbt.events.contextvars import set_contextvars
-from dbt import flags
-from dbt.node_types import ModelLanguage, NodeType
-from dbt.utils import cast_dict_to_dict_of_strings
-
+from dbt.events.contextvars import set_log_contextvars
+from dbt.flags import get_flags
+from dbt.node_types import ModelLanguage, NodeType, AccessType

from .model_config import (
    NodeConfig,
@@ -60,6 +63,7 @@ from .model_config import (
    SnapshotConfig,
)

+
# =====================================================================
# This contains the classes for all of the nodes and node-like objects
# in the manifest. In the "nodes" dictionary of the manifest we find
@@ -115,6 +119,10 @@ class BaseNode(dbtClassMixin, Replaceable):
    def is_relational(self):
        return self.resource_type in NodeType.refable()

+    @property
+    def is_versioned(self):
+        return self.resource_type in NodeType.versioned() and self.version is not None
+
    @property
    def is_ephemeral(self):
        return self.config.materialized == "ephemeral"
@@ -137,6 +145,65 @@ class GraphNode(BaseNode):
        return self.fqn == other.fqn


+@dataclass
+class RefArgs(dbtClassMixin):
+    name: str
+    package: Optional[str] = None
+    version: Optional[NodeVersion] = None
+
+    @property
+    def positional_args(self) -> List[str]:
+        if self.package:
+            return [self.package, self.name]
+        else:
+            return [self.name]
+
+    @property
+    def keyword_args(self) -> Dict[str, Optional[NodeVersion]]:
+        if self.version:
+            return {"version": self.version}
+        else:
+            return {}
+
+
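Since RefArgs is defined just above, its two properties round-trip back to the original ref() arguments; for example:

pinned = RefArgs(name="dim_customers", package="jaffle_shop", version=2)
assert pinned.positional_args == ["jaffle_shop", "dim_customers"]
assert pinned.keyword_args == {"version": 2}

unpinned = RefArgs(name="dim_customers")
assert unpinned.positional_args == ["dim_customers"]
assert unpinned.keyword_args == {}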
+class ConstraintType(str, Enum):
+    check = "check"
+    not_null = "not_null"
+    unique = "unique"
+    primary_key = "primary_key"
+    foreign_key = "foreign_key"
+    custom = "custom"
+
+    @classmethod
+    def is_valid(cls, item):
+        try:
+            cls(item)
+        except ValueError:
+            return False
+        return True
+
+
+@dataclass
+class ColumnLevelConstraint(dbtClassMixin):
+    type: ConstraintType
+    name: Optional[str] = None
+    # expression is a user-provided field that will depend on the constraint type.
+    # It could be a predicate (check type), or a sequence sql keywords (e.g. unique type),
+    # so the vague naming of 'expression' is intended to capture this range.
+    expression: Optional[str] = None
+    warn_unenforced: bool = (
+        True  # Warn if constraint cannot be enforced by platform but will be in DDL
+    )
+    warn_unsupported: bool = (
+        True  # Warn if constraint is not supported by the platform and won't be in DDL
+    )
+
+
+@dataclass
+class ModelLevelConstraint(ColumnLevelConstraint):
+    columns: List[str] = field(default_factory=list)
+
+
@dataclass
class ColumnInfo(AdditionalPropertiesMixin, ExtensibleDbtClassMixin, Replaceable):
    """Used in all ManifestNodes and SourceDefinition"""
@@ -145,11 +212,18 @@ class ColumnInfo(AdditionalPropertiesMixin, ExtensibleDbtClassMixin, Replaceable
    description: str = ""
    meta: Dict[str, Any] = field(default_factory=dict)
    data_type: Optional[str] = None
+    constraints: List[ColumnLevelConstraint] = field(default_factory=list)
    quote: Optional[bool] = None
    tags: List[str] = field(default_factory=list)
    _extra: Dict[str, Any] = field(default_factory=dict)


+@dataclass
+class Contract(dbtClassMixin, Replaceable):
+    enforced: bool = False
+    checksum: Optional[str] = None
+
+
# Metrics, exposures,
@dataclass
class HasRelationMetadata(dbtClassMixin, Replaceable):
@@ -207,8 +281,6 @@ class NodeInfoMixin:

    @property
    def node_info(self):
-        meta = getattr(self, "meta", {})
-        meta_stringified = cast_dict_to_dict_of_strings(meta)
        node_info = {
            "node_path": getattr(self, "path", None),
            "node_name": getattr(self, "name", None),
@@ -218,15 +290,20 @@ class NodeInfoMixin:
            "node_status": str(self._event_status.get("node_status")),
            "node_started_at": self._event_status.get("started_at"),
            "node_finished_at": self._event_status.get("finished_at"),
-            "meta": meta_stringified,
+            "meta": getattr(self, "meta", {}),
+            "node_relation": {
+                "database": getattr(self, "database", None),
+                "schema": getattr(self, "schema", None),
+                "alias": getattr(self, "alias", None),
+                "relation_name": getattr(self, "relation_name", None),
+            },
        }
-        node_info_msg = NodeInfo(**node_info)
-        return node_info_msg
+        return node_info

    def update_event_status(self, **kwargs):
        for k, v in kwargs.items():
            self._event_status[k] = v
-        set_contextvars(node_info=self.node_info)
+        set_log_contextvars(node_info=self.node_info)

    def clear_event_status(self):
        self._event_status = dict()
@@ -238,6 +315,7 @@ class ParsedNode(NodeInfoMixin, ParsedNodeMandatory, SerializableType):
    description: str = field(default="")
    columns: Dict[str, ColumnInfo] = field(default_factory=dict)
    meta: Dict[str, Any] = field(default_factory=dict)
+    group: Optional[str] = None
    docs: Docs = field(default_factory=Docs)
    patch_path: Optional[str] = None
    build_path: Optional[str] = None
@@ -248,17 +326,23 @@ class ParsedNode(NodeInfoMixin, ParsedNodeMandatory, SerializableType):
    relation_name: Optional[str] = None
    raw_code: str = ""

-    def write_node(self, target_path: str, subdirectory: str, payload: str):
+    def get_target_write_path(self, target_path: str, subdirectory: str):
+        # This is called for both the "compiled" subdirectory of "target" and the "run" subdirectory
        if os.path.basename(self.path) == os.path.basename(self.original_file_path):
            # One-to-one relationship of nodes to files.
            path = self.original_file_path
        else:
            # Many-to-one relationship of nodes to files.
            path = os.path.join(self.original_file_path, self.path)
-        full_path = os.path.join(target_path, subdirectory, self.package_name, path)
+        target_write_path = os.path.join(target_path, subdirectory, self.package_name, path)
+        return target_write_path

-        write_file(full_path, payload)
-        return full_path
+    def write_node(self, project_root: str, compiled_path, compiled_code: str):
+        if os.path.isabs(compiled_path):
+            full_path = compiled_path
+        else:
+            full_path = os.path.join(project_root, compiled_path)
+        write_file(full_path, compiled_code)

    def _serialize(self):
        return self.to_dict()
@@ -350,8 +434,18 @@ class ParsedNode(NodeInfoMixin, ParsedNodeMandatory, SerializableType):
            old.unrendered_config,
        )

+    def build_contract_checksum(self):
+        pass
+
+    def same_contract(self, old) -> bool:
+        # This would only apply to seeds
+        return True
+
    def patch(self, patch: "ParsedNodePatch"):
        """Given a ParsedNodePatch, add the new information to the node."""
+        # NOTE: Constraint patching is awkwardly done in the parse_patch function
+        # which calls this one. We need to combine the logic.
+
        # explicitly pick out the parts to update so we don't inadvertently
        # step on the model name or anything
        # Note: config should already be updated
@@ -360,17 +454,64 @@ class ParsedNode(NodeInfoMixin, ParsedNodeMandatory, SerializableType):
        self.created_at = time.time()
        self.description = patch.description
        self.columns = patch.columns
        self.name = patch.name

+        # TODO: version, latest_version, and access are specific to ModelNodes, consider splitting out to ModelNode
+        if self.resource_type != NodeType.Model:
+            if patch.version:
+                warn_or_error(
+                    ValidationWarning(
+                        field_name="version",
+                        resource_type=self.resource_type.value,
+                        node_name=patch.name,
+                    )
+                )
+            if patch.latest_version:
+                warn_or_error(
+                    ValidationWarning(
+                        field_name="latest_version",
+                        resource_type=self.resource_type.value,
+                        node_name=patch.name,
+                    )
+                )
+        self.version = patch.version
+        self.latest_version = patch.latest_version
+
+        # This might not be the ideal place to validate the "access" field,
+        # but at this point we have the information we need to properly
+        # validate and we don't before this.
+        if patch.access:
+            if self.resource_type == NodeType.Model:
+                if AccessType.is_valid(patch.access):
+                    self.access = AccessType(patch.access)
+                else:
+                    raise InvalidAccessTypeError(
+                        unique_id=self.unique_id,
+                        field_value=patch.access,
+                    )
+            else:
+                warn_or_error(
+                    ValidationWarning(
+                        field_name="access",
+                        resource_type=self.resource_type.value,
+                        node_name=patch.name,
+                    )
+                )
+
    def same_contents(self, old) -> bool:
        if old is None:
            return False

+        # Need to ensure that same_contract is called because it
+        # could throw an error
+        same_contract = self.same_contract(old)
        return (
            self.same_body(old)
            and self.same_config(old)
            and self.same_persisted_description(old)
            and self.same_fqn(old)
            and self.same_database_representation(old)
+            and same_contract
            and True
        )

@@ -389,7 +530,7 @@ class CompiledNode(ParsedNode):
    so all ManifestNodes except SeedNode."""

    language: str = "sql"
-    refs: List[List[str]] = field(default_factory=list)
+    refs: List[RefArgs] = field(default_factory=list)
    sources: List[List[str]] = field(default_factory=list)
    metrics: List[List[str]] = field(default_factory=list)
    depends_on: DependsOn = field(default_factory=DependsOn)
@@ -399,6 +540,7 @@ class CompiledNode(ParsedNode):
    extra_ctes_injected: bool = False
    extra_ctes: List[InjectedCTE] = field(default_factory=list)
    _pre_injected_sql: Optional[str] = None
+    contract: Contract = field(default_factory=Contract)

    @property
    def empty(self):
@@ -409,8 +551,10 @@ class CompiledNode(ParsedNode):
        do if extra_ctes were an OrderedDict
        """
        for cte in self.extra_ctes:
+            # Because it's possible that multiple threads are compiling the
+            # node at the same time, we don't want to overwrite already compiled
+            # sql in the extra_ctes with empty sql.
            if cte.id == cte_id:
                cte.sql = sql
                break
        else:
            self.extra_ctes.append(InjectedCTE(id=cte_id, sql=sql))
@@ -437,6 +581,81 @@ class CompiledNode(ParsedNode):
    def depends_on_macros(self):
        return self.depends_on.macros

+    def build_contract_checksum(self):
+        # We don't need to construct the checksum if the model does not
+        # have contract enforced, because it won't be used.
+        # This needs to be executed after contract config is set
+
+        # Avoid rebuilding the checksum if it has already been set.
+        if self.contract.checksum is not None:
+            return
+
+        if self.contract.enforced is True:
+            contract_state = ""
+            # We need to sort the columns so that order doesn't matter
+            # columns is a str: ColumnInfo dictionary
+            sorted_columns = sorted(self.columns.values(), key=lambda col: col.name)
+            for column in sorted_columns:
+                contract_state += f"|{column.name}"
+                contract_state += str(column.data_type)
+            data = contract_state.encode("utf-8")
+            self.contract.checksum = hashlib.new("sha256", data).hexdigest()
+
+    def same_contract(self, old) -> bool:
+        # If the contract wasn't previously enforced:
+        if old.contract.enforced is False and self.contract.enforced is False:
+            # No change -- same_contract: True
+            return True
+        if old.contract.enforced is False and self.contract.enforced is True:
+            # Now it's enforced. This is a change, but not a breaking change -- same_contract: False
+            return False
+
+        # Otherwise: The contract was previously enforced, and we need to check for changes.
+        # Happy path: The contract is still being enforced, and the checksums are identical.
+        if self.contract.enforced is True and self.contract.checksum == old.contract.checksum:
+            # No change -- same_contract: True
+            return True
+
+        # Otherwise: There has been a change.
+        # We need to determine if it is a **breaking** change.
+        # These are the categories of breaking changes:
+        contract_enforced_disabled: bool = False
+        columns_removed: List[str] = []
+        column_type_changes: List[Tuple[str, str, str]] = []
+
+        if old.contract.enforced is True and self.contract.enforced is False:
+            # Breaking change: the contract was previously enforced, and it no longer is
+            contract_enforced_disabled = True
+
+        # Next, compare each column from the previous contract (old.columns)
+        for key, value in sorted(old.columns.items()):
+            # Has this column been removed?
+            if key not in self.columns.keys():
+                columns_removed.append(value.name)
+            # Has this column's data type changed?
+            elif value.data_type != self.columns[key].data_type:
+                column_type_changes.append(
+                    (str(value.name), str(value.data_type), str(self.columns[key].data_type))
+                )
+
+        # If a column has been added, it will be missing in the old.columns, and present in self.columns
+        # That's a change (caught by the different checksums), but not a breaking change
+
+        # Did we find any changes that we consider breaking? If so, that's an error
+        if contract_enforced_disabled or columns_removed or column_type_changes:
+            raise (
+                ContractBreakingChangeError(
+                    contract_enforced_disabled=contract_enforced_disabled,
+                    columns_removed=columns_removed,
+                    column_type_changes=column_type_changes,
+                    node=self,
+                )
+            )
+
+        # Otherwise, though we didn't find any *breaking* changes, the contract has still changed -- same_contract: False
+        else:
+            return False

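Taken together, build_contract_checksum and same_contract reduce a contract to the sorted column names and data types, hash that, and compare hashes across parses before classifying any difference as breaking or not. A self-contained sketch of the hashing side (plain name-to-type dicts instead of ColumnInfo objects):

import hashlib
from typing import Dict, Optional

def contract_checksum(columns: Dict[str, Optional[str]]) -> str:
    # columns maps name -> data_type; sort so that YAML ordering doesn't matter
    contract_state = ""
    for name in sorted(columns):
        contract_state += f"|{name}"
        contract_state += str(columns[name])
    return hashlib.new("sha256", contract_state.encode("utf-8")).hexdigest()

old = contract_checksum({"id": "integer", "email": "text"})
same = contract_checksum({"email": "text", "id": "integer"})    # reordered: equal
changed = contract_checksum({"id": "bigint", "email": "text"})  # type change: differs

assert old == same and old != changed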
# ====================================
# CompiledNode subclasses
@@ -457,6 +676,21 @@ class HookNode(CompiledNode):
@dataclass
class ModelNode(CompiledNode):
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Model]})
+    access: AccessType = AccessType.Protected
+    constraints: List[ModelLevelConstraint] = field(default_factory=list)
+    version: Optional[NodeVersion] = None
+    latest_version: Optional[NodeVersion] = None
+
+    @property
+    def is_latest_version(self) -> bool:
+        return self.version is not None and self.version == self.latest_version
+
+    @property
+    def search_name(self):
+        if self.version is None:
+            return self.name
+        else:
+            return f"{self.name}.v{self.version}"

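These two properties are what the versioned ref lookup keys off of: search_name is name.vN for versioned models, and only the latest version also answers to the bare name. In isolation, as plain functions:

from typing import Optional, Union

NodeVersion = Union[str, float]

def search_name(name: str, version: Optional[NodeVersion]) -> str:
    # Versioned models get a ".vN" suffix; unversioned models keep the bare name.
    return name if version is None else f"{name}.v{version}"

def is_latest_version(version: Optional[NodeVersion], latest: Optional[NodeVersion]) -> bool:
    return version is not None and version == latest

assert search_name("dim_customers", 2) == "dim_customers.v2"
assert search_name("dim_customers", None) == "dim_customers"
assert is_latest_version(2, 2) and not is_latest_version(1, 2)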
# TODO: rm?
@@ -482,6 +716,7 @@ class SeedNode(ParsedNode):  # No SQLDefaults!
    # seeds need the root_path because the contents are not loaded initially
    # and we need the root_path to load the seed later
    root_path: Optional[str] = None
+    depends_on: MacroDependsOn = field(default_factory=MacroDependsOn)

    def same_seeds(self, other: "SeedNode") -> bool:
        # for seeds, we check the hashes. If the hashes are different types,
@@ -523,6 +758,39 @@ class SeedNode(ParsedNode):  # No SQLDefaults!
        """Seeds are never empty"""
        return False

+    def _disallow_implicit_dependencies(self):
+        """Disallow seeds to take implicit upstream dependencies via pre/post hooks"""
+        # Seeds are root nodes in the DAG. They cannot depend on other nodes.
+        # However, it's possible to define pre- and post-hooks on seeds, and for those
+        # hooks to include {{ ref(...) }}. This worked in previous versions, but it
+        # was never officially documented or supported behavior. Let's raise an explicit error,
+        # which will surface during parsing if the user has written code such that we attempt
+        # to capture & record a ref/source/metric call on the SeedNode.
+        # For more details: https://github.com/dbt-labs/dbt-core/issues/6806
+        hooks = [f'- pre_hook: "{hook.sql}"' for hook in self.config.pre_hook] + [
+            f'- post_hook: "{hook.sql}"' for hook in self.config.post_hook
+        ]
+        hook_list = "\n".join(hooks)
+        message = f"""
+Seeds cannot depend on other nodes. dbt detected a seed with a pre- or post-hook
+that calls 'ref', 'source', or 'metric', either directly or indirectly via other macros.
+
+Error raised for '{self.unique_id}', which has these hooks defined: \n{hook_list}
+        """
+        raise ParsingError(message)
+
+    @property
+    def refs(self):
+        self._disallow_implicit_dependencies()
+
+    @property
+    def sources(self):
+        self._disallow_implicit_dependencies()
+
+    @property
+    def metrics(self):
+        self._disallow_implicit_dependencies()
+
    def same_body(self, other) -> bool:
        return self.same_seeds(other)

@@ -531,8 +799,8 @@ class SeedNode(ParsedNode):  # No SQLDefaults!
        return []

    @property
-    def depends_on_macros(self):
-        return []
+    def depends_on_macros(self) -> List[str]:
+        return self.depends_on.macros

    @property
    def extra_ctes(self):
@@ -557,7 +825,7 @@ class TestShouldStoreFailures:
    def should_store_failures(self):
        if self.config.store_failures:
            return self.config.store_failures
-        return flags.STORE_FAILURES
+        return get_flags().STORE_FAILURES

    @property
    def is_relational(self):
@@ -608,6 +876,7 @@ class GenericTestNode(TestShouldStoreFailures, CompiledNode, HasTestMetadata):
    # Was not able to make mypy happy and keep the code working. We need to
    # refactor the various configs.
    config: TestConfig = field(default_factory=TestConfig)  # type: ignore
+    attached_node: Optional[str] = None

    def same_contents(self, other) -> bool:
        if other is None:
@@ -823,7 +1092,7 @@ class SourceDefinition(NodeInfoMixin, ParsedSourceMandatory):
        if old is None:
            return True

-        # config changes are changes (because the only config is "enabled", and
+        # config changes are changes (because the only config is "enforced", and
        # enabling a source is a change!)
        # changing the database/schema/identifier is a change
        # messing around with external stuff is a change (uh, right?)
@@ -892,7 +1161,7 @@ class SourceDefinition(NodeInfoMixin, ParsedSourceMandatory):
@dataclass
class Exposure(GraphNode):
    type: ExposureType
-    owner: ExposureOwner
+    owner: Owner
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Exposure]})
    description: str = ""
    label: Optional[str] = None
@@ -903,7 +1172,7 @@ class Exposure(GraphNode):
    unrendered_config: Dict[str, Any] = field(default_factory=dict)
    url: Optional[str] = None
    depends_on: DependsOn = field(default_factory=DependsOn)
-    refs: List[List[str]] = field(default_factory=list)
+    refs: List[RefArgs] = field(default_factory=list)
    sources: List[List[str]] = field(default_factory=list)
    metrics: List[List[str]] = field(default_factory=list)
    created_at: float = field(default_factory=lambda: time.time())
@@ -995,9 +1264,10 @@ class Metric(GraphNode):
    unrendered_config: Dict[str, Any] = field(default_factory=dict)
    sources: List[List[str]] = field(default_factory=list)
    depends_on: DependsOn = field(default_factory=DependsOn)
-    refs: List[List[str]] = field(default_factory=list)
+    refs: List[RefArgs] = field(default_factory=list)
    metrics: List[List[str]] = field(default_factory=list)
    created_at: float = field(default_factory=lambda: time.time())
+    group: Optional[str] = None

    @property
    def depends_on_nodes(self):
@@ -1065,6 +1335,18 @@ class Metric(GraphNode):
        )


+# ====================================
+# Group node
+# ====================================
+
+
+@dataclass
+class Group(BaseNode):
+    name: str
+    owner: Owner
+    resource_type: NodeType = field(metadata={"restrict": [NodeType.Group]})
+
+
# ====================================
# Patches
# ====================================
@@ -1085,6 +1367,9 @@ class ParsedPatch(HasYamlMetadata, Replaceable):
@dataclass
class ParsedNodePatch(ParsedPatch):
    columns: Dict[str, ColumnInfo]
+    access: Optional[str]
+    version: Optional[NodeVersion]
+    latest_version: Optional[NodeVersion]


@dataclass
@@ -1133,6 +1418,7 @@ Resource = Union[
    GraphMemberNode,
    Documentation,
    Macro,
+    Group,
]

TestNode = Union[
@@ -6,12 +6,12 @@ from dbt.contracts.util import (
    AdditionalPropertiesMixin,
    Mergeable,
    Replaceable,
-    rename_metric_attr,
)
+from dbt.contracts.graph.manifest_upgrade import rename_metric_attr

# trigger the PathEncoder
import dbt.helper_types  # noqa:F401
-from dbt.exceptions import CompilationError, ParsingError
+from dbt.exceptions import CompilationError, ParsingError, DbtInternalError

from dbt.dataclass_schema import dbtClassMixin, StrEnum, ExtensibleDbtClassMixin, ValidationError

@@ -88,11 +88,12 @@ class Docs(dbtClassMixin, Replaceable):


@dataclass
-class HasDocs(AdditionalPropertiesMixin, ExtensibleDbtClassMixin, Replaceable):
+class HasColumnProps(AdditionalPropertiesMixin, ExtensibleDbtClassMixin, Replaceable):
    name: str
    description: str = ""
    meta: Dict[str, Any] = field(default_factory=dict)
    data_type: Optional[str] = None
+    constraints: List[Dict[str, Any]] = field(default_factory=list)
    docs: Docs = field(default_factory=Docs)
    _extra: Dict[str, Any] = field(default_factory=dict)

@@ -101,27 +102,23 @@ TestDef = Union[Dict[str, Any], str]


@dataclass
-class HasTests(HasDocs):
-    tests: Optional[List[TestDef]] = None
-
-    def __post_init__(self):
-        if self.tests is None:
-            self.tests = []
+class HasColumnAndTestProps(HasColumnProps):
+    tests: List[TestDef] = field(default_factory=list)


@dataclass
-class UnparsedColumn(HasTests):
+class UnparsedColumn(HasColumnAndTestProps):
    quote: Optional[bool] = None
    tags: List[str] = field(default_factory=list)


@dataclass
class HasColumnDocs(dbtClassMixin, Replaceable):
-    columns: Sequence[HasDocs] = field(default_factory=list)
+    columns: Sequence[HasColumnProps] = field(default_factory=list)


@dataclass
-class HasColumnTests(HasColumnDocs):
+class HasColumnTests(dbtClassMixin, Replaceable):
    columns: Sequence[UnparsedColumn] = field(default_factory=list)

@@ -141,14 +138,121 @@ class HasConfig:
    config: Dict[str, Any] = field(default_factory=dict)


-@dataclass
-class UnparsedAnalysisUpdate(HasConfig, HasColumnDocs, HasDocs, HasYamlMetadata):
-    pass
+NodeVersion = Union[str, float]


@dataclass
-class UnparsedNodeUpdate(HasConfig, HasColumnTests, HasTests, HasYamlMetadata):
+class UnparsedVersion(dbtClassMixin):
+    v: NodeVersion
+    defined_in: Optional[str] = None
+    description: str = ""
+    access: Optional[str] = None
+    config: Dict[str, Any] = field(default_factory=dict)
+    constraints: List[Dict[str, Any]] = field(default_factory=list)
+    docs: Docs = field(default_factory=Docs)
+    tests: Optional[List[TestDef]] = None
+    columns: Sequence[Union[dbt.helper_types.IncludeExclude, UnparsedColumn]] = field(
+        default_factory=list
+    )
+
+    def __lt__(self, other):
+        try:
+            v = type(other.v)(self.v)
+            return v < other.v
+        except ValueError:
+            try:
+                other_v = type(self.v)(other.v)
+                return self.v < other_v
+            except ValueError:
+                return str(self.v) < str(other.v)
+
+    @property
+    def include_exclude(self) -> dbt.helper_types.IncludeExclude:
+        return self._include_exclude
+
+    @property
+    def unparsed_columns(self) -> List:
+        return self._unparsed_columns
+
+    @property
+    def formatted_v(self) -> str:
+        return f"v{self.v}"
+
+    def __post_init__(self):
+        has_include_exclude = False
+        self._include_exclude = dbt.helper_types.IncludeExclude(include="*")
+        self._unparsed_columns = []
+        for column in self.columns:
+            if isinstance(column, dbt.helper_types.IncludeExclude):
+                if not has_include_exclude:
+                    self._include_exclude = column
+                    has_include_exclude = True
+                else:
+                    raise ParsingError("version can have at most one include/exclude element")
+            else:
+                self._unparsed_columns.append(column)
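UnparsedVersion.__lt__ above deserves a note: v may be a str or a float, so comparison first tries to coerce one side to the other's type and only falls back to string comparison when both coercions fail. A standalone sketch of the same ordering logic, with a few illustrative cases:

def version_lt(a, b) -> bool:
    # Mirrors UnparsedVersion.__lt__: coerce, then fall back to strings.
    try:
        return type(b)(a) < b
    except ValueError:
        try:
            return a < type(a)(b)
        except ValueError:
            return str(a) < str(b)

assert version_lt(2, 10)            # ints: numeric comparison
assert version_lt("2", 10.0)        # "2" coerced to 2.0
assert not version_lt("10", 2)      # "10" coerced to 10: numeric, not lexicographic
assert version_lt("alpha", "beta")  # both strings: lexicographic ordering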
+@dataclass
+class UnparsedAnalysisUpdate(HasConfig, HasColumnDocs, HasColumnProps, HasYamlMetadata):
+    access: Optional[str] = None
+
+
+@dataclass
+class UnparsedNodeUpdate(HasConfig, HasColumnTests, HasColumnAndTestProps, HasYamlMetadata):
    quote_columns: Optional[bool] = None
+    access: Optional[str] = None
+
+
+@dataclass
+class UnparsedModelUpdate(UnparsedNodeUpdate):
+    quote_columns: Optional[bool] = None
+    access: Optional[str] = None
+    latest_version: Optional[NodeVersion] = None
+    versions: Sequence[UnparsedVersion] = field(default_factory=list)
+
+    def __post_init__(self):
+        if self.latest_version:
+            version_values = [version.v for version in self.versions]
+            if self.latest_version not in version_values:
+                raise ParsingError(
+                    f"latest_version: {self.latest_version} is not one of model '{self.name}' versions: {version_values} "
+                )
+
+        seen_versions: set[str] = set()
+        for version in self.versions:
+            if str(version.v) in seen_versions:
+                raise ParsingError(
+                    f"Found duplicate version: '{version.v}' in versions list of model '{self.name}'"
+                )
+            seen_versions.add(str(version.v))
+
+        self._version_map = {version.v: version for version in self.versions}
+
+    def get_columns_for_version(self, version: NodeVersion) -> List[UnparsedColumn]:
+        if version not in self._version_map:
+            raise DbtInternalError(
+                f"get_columns_for_version called for version '{version}' not in version map"
+            )
+
+        version_columns = []
+        unparsed_version = self._version_map[version]
+        for base_column in self.columns:
+            if unparsed_version.include_exclude.includes(base_column.name):
+                version_columns.append(base_column)
+
+        for column in unparsed_version.unparsed_columns:
+            version_columns.append(column)
+
+        return version_columns
+
+    def get_tests_for_version(self, version: NodeVersion) -> List[TestDef]:
+        if version not in self._version_map:
+            raise DbtInternalError(
+                f"get_tests_for_version called for version '{version}' not in version map"
+            )
+        unparsed_version = self._version_map[version]
+        return unparsed_version.tests if unparsed_version.tests is not None else self.tests
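How get_columns_for_version resolves columns: start from the model's top-level columns filtered through the version's include/exclude spec, then append the columns the version declares itself. A simplified sketch of the same merge (plain string lists stand in for IncludeExclude and UnparsedColumn):

from typing import List

def columns_for_version(
    base_columns: List[str],
    include: List[str],          # ["*"] means all, like IncludeExclude(include="*")
    exclude: List[str],
    version_columns: List[str],  # columns declared on the version itself
) -> List[str]:
    selected = [
        c for c in base_columns
        if (include == ["*"] or c in include) and c not in exclude
    ]
    return selected + version_columns

cols = columns_for_version(
    base_columns=["id", "email", "legacy_flag"],
    include=["*"],
    exclude=["legacy_flag"],             # v2 drops a column
    version_columns=["lifetime_value"],  # and adds one of its own
)
assert cols == ["id", "email", "lifetime_value"]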
@dataclass
@@ -159,7 +263,7 @@ class MacroArgument(dbtClassMixin):


@dataclass
-class UnparsedMacroUpdate(HasConfig, HasDocs, HasYamlMetadata):
+class UnparsedMacroUpdate(HasConfig, HasColumnProps, HasYamlMetadata):
    arguments: List[MacroArgument] = field(default_factory=list)


@@ -246,7 +350,7 @@ class Quoting(dbtClassMixin, Mergeable):


@dataclass
-class UnparsedSourceTableDefinition(HasColumnTests, HasTests):
+class UnparsedSourceTableDefinition(HasColumnTests, HasColumnAndTestProps):
    config: Dict[str, Any] = field(default_factory=dict)
    loaded_at_field: Optional[str] = None
    identifier: Optional[str] = None
@@ -424,8 +528,8 @@ class MaturityType(StrEnum):


@dataclass
-class ExposureOwner(dbtClassMixin, Replaceable):
-    email: str
+class Owner(AdditionalPropertiesAllowed, Replaceable):
+    email: Optional[str] = None
    name: Optional[str] = None


@@ -433,7 +537,7 @@ class ExposureOwner(dbtClassMixin, Replaceable):
class UnparsedExposure(dbtClassMixin, Replaceable):
    name: str
    type: ExposureType
-    owner: ExposureOwner
+    owner: Owner
    description: str = ""
    label: Optional[str] = None
    maturity: Optional[MaturityType] = None
@@ -451,6 +555,9 @@ class UnparsedExposure(dbtClassMixin, Replaceable):
        if not (re.match(r"[\w-]+$", data["name"])):
            deprecations.warn("exposure-name", exposure=data["name"])

+        if data["owner"].get("name") is None and data["owner"].get("email") is None:
+            raise ValidationError("Exposure owner must have at least one of 'name' or 'email'.")
+

@dataclass
class MetricFilter(dbtClassMixin, Replaceable):
@@ -533,3 +640,15 @@ class UnparsedMetric(dbtClassMixin, Replaceable):

        if data.get("model") is not None and data.get("calculation_method") == "derived":
            raise ValidationError("Derived metrics cannot have a 'model' property")
+
+
+@dataclass
+class UnparsedGroup(dbtClassMixin, Replaceable):
+    name: str
+    owner: Owner
+
+    @classmethod
+    def validate(cls, data):
+        super(UnparsedGroup, cls).validate(data)
+        if data["owner"].get("name") is None and data["owner"].get("email") is None:
+            raise ValidationError("Group owner must have at least one of 'name' or 'email'.")
@@ -184,8 +184,8 @@ BANNED_PROJECT_NAMES = {
@dataclass
class Project(HyphenatedDbtClassMixin, Replaceable):
    name: Identifier
-    version: Union[SemverString, float]
-    config_version: int
+    config_version: Optional[int] = 2
+    version: Optional[Union[SemverString, float]] = None
    project_root: Optional[str] = None
    source_paths: Optional[List[str]] = None
    model_paths: Optional[List[str]] = None
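In effect this hunk makes both `version` and `config-version` optional in `dbt_project.yml`, with `config-version` defaulting to 2. A sketch of the same defaulting pattern with a stand-in dataclass (not dbt's `Project` class itself, which carries many more fields):

```python
from dataclasses import dataclass
from typing import Optional, Union


@dataclass
class MinimalProject:
    # stand-in for the contract above: only 'name' is still required
    name: str
    config_version: Optional[int] = 2
    version: Optional[Union[str, float]] = None


p = MinimalProject(name="jaffle_shop")
assert p.config_version == 2 and p.version is None
```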
@@ -243,21 +243,26 @@ class Project(HyphenatedDbtClassMixin, Replaceable):

@dataclass
class UserConfig(ExtensibleDbtClassMixin, Replaceable, UserConfigContract):
-    send_anonymous_usage_stats: bool = DEFAULT_SEND_ANONYMOUS_USAGE_STATS
-    use_colors: Optional[bool] = None
+    cache_selected_only: Optional[bool] = None
+    debug: Optional[bool] = None
+    fail_fast: Optional[bool] = None
+    indirect_selection: Optional[str] = None
+    log_format: Optional[str] = None
+    log_format_file: Optional[str] = None
+    log_level: Optional[str] = None
+    log_level_file: Optional[str] = None
    partial_parse: Optional[bool] = None
+    populate_cache: Optional[bool] = None
    printer_width: Optional[int] = None
-    write_json: Optional[bool] = None
+    send_anonymous_usage_stats: bool = DEFAULT_SEND_ANONYMOUS_USAGE_STATS
+    static_parser: Optional[bool] = None
+    use_colors: Optional[bool] = None
+    use_colors_file: Optional[bool] = None
+    use_experimental_parser: Optional[bool] = None
+    version_check: Optional[bool] = None
    warn_error: Optional[bool] = None
    warn_error_options: Optional[Dict[str, Union[str, List[str]]]] = None
-    log_format: Optional[str] = None
-    debug: Optional[bool] = None
-    version_check: Optional[bool] = None
-    fail_fast: Optional[bool] = None
-    use_experimental_parser: Optional[bool] = None
-    static_parser: Optional[bool] = None
-    indirect_selection: Optional[str] = None
-    cache_selected_only: Optional[bool] = None
+    write_json: Optional[bool] = None


@dataclass
@@ -10,8 +10,8 @@ from dbt.contracts.util import (
from dbt.exceptions import DbtInternalError
from dbt.events.functions import fire_event
from dbt.events.types import TimingInfoCollected
-from dbt.events.proto_types import RunResultMsg, TimingInfoMsg
from dbt.events.contextvars import get_node_info
+from dbt.events.helpers import datetime_to_json_string
from dbt.logger import TimingProcessor
from dbt.utils import lowercase, cast_to_str, cast_to_int
from dbt.dataclass_schema import dbtClassMixin, StrEnum
@@ -45,11 +45,13 @@ class TimingInfo(dbtClassMixin):
    def end(self):
        self.completed_at = datetime.utcnow()

-    def to_msg(self):
-        timsg = TimingInfoMsg(
-            name=self.name, started_at=self.started_at, completed_at=self.completed_at
-        )
-        return timsg
+    def to_msg_dict(self):
+        msg_dict = {"name": self.name}
+        if self.started_at:
+            msg_dict["started_at"] = datetime_to_json_string(self.started_at)
+        if self.completed_at:
+            msg_dict["completed_at"] = datetime_to_json_string(self.completed_at)
+        return msg_dict

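For reference, a completed `TimingInfo` now serializes to a plain, JSON-ready dict instead of a betterproto message. An illustrative shape (timestamps rendered by `datetime_to_json_string`; the values here are made up):

```python
# illustrative result of TimingInfo("execute").to_msg_dict() after end()
info = {
    "name": "execute",
    "started_at": "2023-04-01T12:00:00.000000Z",
    "completed_at": "2023-04-01T12:00:03.214000Z",
}
```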
# This is a context manager
@@ -67,7 +69,7 @@ class collect_timing_info:
        with TimingProcessor(self.timing_info):
            fire_event(
                TimingInfoCollected(
-                    timing_info=self.timing_info.to_msg(), node_info=get_node_info()
+                    timing_info=self.timing_info.to_msg_dict(), node_info=get_node_info()
                )
            )

@@ -129,17 +131,17 @@ class BaseResult(dbtClassMixin):
        data["failures"] = None
        return data

-    def to_msg(self):
-        # TODO: add more fields
-        msg = RunResultMsg()
-        msg.status = str(self.status)
-        msg.message = cast_to_str(self.message)
-        msg.thread = self.thread_id
-        msg.execution_time = self.execution_time
-        msg.num_failures = cast_to_int(self.failures)
-        msg.timing_info = [ti.to_msg() for ti in self.timing]
-        # adapter_response
-        return msg
+    def to_msg_dict(self):
+        msg_dict = {
+            "status": str(self.status),
+            "message": cast_to_str(self.message),
+            "thread": self.thread_id,
+            "execution_time": self.execution_time,
+            "num_failures": cast_to_int(self.failures),
+            "timing_info": [ti.to_msg_dict() for ti in self.timing],
+            "adapter_response": self.adapter_response,
+        }
+        return msg_dict


@dataclass
@@ -7,15 +7,17 @@ from dbt.exceptions import IncompatibleSchemaError


class PreviousState:
-    def __init__(self, path: Path, current_path: Path):
-        self.path: Path = path
-        self.current_path: Path = current_path
+    def __init__(self, state_path: Path, target_path: Path, project_root: Path):
+        self.state_path: Path = state_path
+        self.target_path: Path = target_path
+        self.project_root: Path = project_root
        self.manifest: Optional[WritableManifest] = None
        self.results: Optional[RunResultsArtifact] = None
        self.sources: Optional[FreshnessExecutionResultArtifact] = None
        self.sources_current: Optional[FreshnessExecutionResultArtifact] = None

-        manifest_path = self.path / "manifest.json"
+        # Note: if state_path is absolute, project_root will be ignored.
+        manifest_path = self.project_root / self.state_path / "manifest.json"
        if manifest_path.exists() and manifest_path.is_file():
            try:
                self.manifest = WritableManifest.read_and_check_versions(str(manifest_path))
@@ -23,7 +25,7 @@ class PreviousState:
                exc.add_filename(str(manifest_path))
                raise

-        results_path = self.path / "run_results.json"
+        results_path = self.project_root / self.state_path / "run_results.json"
        if results_path.exists() and results_path.is_file():
            try:
                self.results = RunResultsArtifact.read_and_check_versions(str(results_path))
@@ -31,7 +33,7 @@ class PreviousState:
                exc.add_filename(str(results_path))
                raise

-        sources_path = self.path / "sources.json"
+        sources_path = self.project_root / self.state_path / "sources.json"
        if sources_path.exists() and sources_path.is_file():
            try:
                self.sources = FreshnessExecutionResultArtifact.read_and_check_versions(
@@ -41,7 +43,7 @@ class PreviousState:
                exc.add_filename(str(sources_path))
                raise

-        sources_current_path = self.current_path / "sources.json"
+        sources_current_path = self.project_root / self.target_path / "sources.json"
        if sources_current_path.exists() and sources_current_path.is_file():
            try:
                self.sources_current = FreshnessExecutionResultArtifact.read_and_check_versions(
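The "if state_path is absolute, project_root will be ignored" note follows directly from `pathlib` semantics: joining onto an absolute path discards everything to its left. A quick standalone illustration:

```python
from pathlib import Path

project_root = Path("/home/user/jaffle_shop")

# relative state paths are resolved under the project root...
assert project_root / Path("prod-artifacts") / "manifest.json" == Path(
    "/home/user/jaffle_shop/prod-artifacts/manifest.json"
)

# ...but an absolute state path "wins" and project_root is ignored
assert project_root / Path("/tmp/state") / "manifest.json" == Path("/tmp/state/manifest.json")
```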
@@ -3,13 +3,13 @@ from datetime import datetime
from typing import List, Tuple, ClassVar, Type, TypeVar, Dict, Any, Optional

from dbt.clients.system import write_json, read_json
from dbt import deprecations
from dbt.exceptions import (
    DbtInternalError,
    DbtRuntimeError,
    IncompatibleSchemaError,
)
from dbt.version import __version__

from dbt.events.functions import get_invocation_id, get_metadata_vars
from dbt.dataclass_schema import dbtClassMixin
@@ -186,115 +186,6 @@ def schema_version(name: str, version: int):
    return inner


-def get_manifest_schema_version(dct: dict) -> int:
-    schema_version = dct.get("metadata", {}).get("dbt_schema_version", None)
-    if not schema_version:
-        raise ValueError("Manifest doesn't have schema version")
-    return int(schema_version.split(".")[-2][-1])

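Before its removal here, `get_manifest_schema_version` recovered the version number by slicing the schema URL. A quick worked example of that parse (URL shape per the public dbt schema URLs), which also shows why the `[-1]` index only handled single-digit versions:

```python
url = "https://schemas.getdbt.com/dbt/manifest/v7.json"
# split(".") -> ['https://schemas', 'getdbt', 'com/dbt/manifest/v7', 'json']
# [-2]       -> 'com/dbt/manifest/v7'; its last character is '7'
assert int(url.split(".")[-2][-1]) == 7
```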
-# we renamed these properties in v1.3
-# this method allows us to be nice to the early adopters
-def rename_metric_attr(data: dict, raise_deprecation_warning: bool = False) -> dict:
-    metric_name = data["name"]
-    if raise_deprecation_warning and (
-        "sql" in data.keys()
-        or "type" in data.keys()
-        or data.get("calculation_method") == "expression"
-    ):
-        deprecations.warn("metric-attr-renamed", metric_name=metric_name)
-    duplicated_attribute_msg = """\n
-The metric '{}' contains both the deprecated metric property '{}'
-and the up-to-date metric property '{}'. Please remove the deprecated property.
-"""
-    if "sql" in data.keys():
-        if "expression" in data.keys():
-            raise ValidationError(
-                duplicated_attribute_msg.format(metric_name, "sql", "expression")
-            )
-        else:
-            data["expression"] = data.pop("sql")
-    if "type" in data.keys():
-        if "calculation_method" in data.keys():
-            raise ValidationError(
-                duplicated_attribute_msg.format(metric_name, "type", "calculation_method")
-            )
-        else:
-            calculation_method = data.pop("type")
-            data["calculation_method"] = calculation_method
-    # we also changed "type: expression" -> "calculation_method: derived"
-    if data.get("calculation_method") == "expression":
-        data["calculation_method"] = "derived"
-    return data
-
-
-def rename_sql_attr(node_content: dict) -> dict:
-    if "raw_sql" in node_content:
-        node_content["raw_code"] = node_content.pop("raw_sql")
-    if "compiled_sql" in node_content:
-        node_content["compiled_code"] = node_content.pop("compiled_sql")
-    node_content["language"] = "sql"
-    return node_content
-
-
-def upgrade_node_content(node_content):
-    rename_sql_attr(node_content)
-    if node_content["resource_type"] != "seed" and "root_path" in node_content:
-        del node_content["root_path"]
-
-
-def upgrade_seed_content(node_content):
-    # Remove compilation related attributes
-    for attr_name in (
-        "language",
-        "refs",
-        "sources",
-        "metrics",
-        "depends_on",
-        "compiled_path",
-        "compiled",
-        "compiled_code",
-        "extra_ctes_injected",
-        "extra_ctes",
-        "relation_name",
-    ):
-        if attr_name in node_content:
-            del node_content[attr_name]
-
-
-def upgrade_manifest_json(manifest: dict) -> dict:
-    for node_content in manifest.get("nodes", {}).values():
-        upgrade_node_content(node_content)
-        if node_content["resource_type"] == "seed":
-            upgrade_seed_content(node_content)
-    for disabled in manifest.get("disabled", {}).values():
-        # There can be multiple disabled nodes for the same unique_id
-        # so make sure all the nodes get the attr renamed
-        for node_content in disabled:
-            upgrade_node_content(node_content)
-            if node_content["resource_type"] == "seed":
-                upgrade_seed_content(node_content)
-    for metric_content in manifest.get("metrics", {}).values():
-        # handle attr renames + value translation ("expression" -> "derived")
-        metric_content = rename_metric_attr(metric_content)
-        if "root_path" in metric_content:
-            del metric_content["root_path"]
-    for exposure_content in manifest.get("exposures", {}).values():
-        if "root_path" in exposure_content:
-            del exposure_content["root_path"]
-    for source_content in manifest.get("sources", {}).values():
-        if "root_path" in exposure_content:
-            del source_content["root_path"]
-    for macro_content in manifest.get("macros", {}).values():
-        if "root_path" in macro_content:
-            del macro_content["root_path"]
-    for doc_content in manifest.get("docs", {}).values():
-        if "root_path" in doc_content:
-            del doc_content["root_path"]
-        doc_content["resource_type"] = "doc"
-    return manifest

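For reference, the removed `rename_sql_attr` performed a plain key rename plus a `language` stamp; a standalone mirror of its effect is below. (Note, too, that the removed sources loop above tested `exposure_content` where it meant `source_content`, a latent bug that this deletion retires.)

```python
node = {"raw_sql": "select 1 as id", "compiled_sql": "select 1 as id"}

# the same transformation rename_sql_attr applied in place
node["raw_code"] = node.pop("raw_sql")
node["compiled_code"] = node.pop("compiled_sql")
node["language"] = "sql"

assert node == {
    "raw_code": "select 1 as id",
    "compiled_code": "select 1 as id",
    "language": "sql",
}
```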
# This is used in the ArtifactMixin and RemoteResult classes
@dataclasses.dataclass
class VersionedSchema(dbtClassMixin):
@@ -336,9 +227,15 @@ class VersionedSchema(dbtClassMixin):
                expected=str(cls.dbt_schema_version),
                found=previous_schema_version,
            )
-        if get_manifest_schema_version(data) <= 7:
-            data = upgrade_manifest_json(data)
-        return cls.from_dict(data)  # type: ignore

+        return cls.upgrade_schema_version(data)
+
+    @classmethod
+    def upgrade_schema_version(cls, data):
+        """This will modify the data (dictionary) passed in to match the current
+        artifact schema code, if necessary. This is the default method, which
+        just returns the instantiated object via from_dict."""
+        return cls.from_dict(data)


T = TypeVar("T", bound="ArtifactMixin")
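With `upgrade_schema_version` exposed as an overridable classmethod, each artifact class can own its own migrations instead of util.py hard-coding manifest logic. A sketch of how a subclass might hook in; `migrate_v7_payload` is a hypothetical helper, not dbt code:

```python
from dbt.contracts.util import VersionedSchema  # the class shown above


def migrate_v7_payload(data: dict) -> dict:
    # hypothetical placeholder for a real migration
    return data


class UpgradableArtifact(VersionedSchema):
    @classmethod
    def upgrade_schema_version(cls, data):
        # rewrite old payloads before instantiating via from_dict
        schema = data.get("metadata", {}).get("dbt_schema_version", "")
        if schema.endswith("/v7.json"):
            data = migrate_v7_payload(data)
        return cls.from_dict(data)
```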
@@ -81,6 +81,36 @@ class ExposureNameDeprecation(DBTDeprecation):
    _event = "ExposureNameDeprecation"


+class ConfigLogPathDeprecation(DBTDeprecation):
+    _name = "project-config-log-path"
+    _event = "ConfigLogPathDeprecation"
+
+
+class ConfigTargetPathDeprecation(DBTDeprecation):
+    _name = "project-config-target-path"
+    _event = "ConfigTargetPathDeprecation"
+
+
+class CollectFreshnessReturnSignature(DBTDeprecation):
+    _name = "collect-freshness-return-signature"
+    _event = "CollectFreshnessReturnSignature"
+
+
+def renamed_env_var(old_name: str, new_name: str):
+    class EnvironmentVariableRenamed(DBTDeprecation):
+        _name = f"environment-variable-renamed:{old_name}"
+        _event = "EnvironmentVariableRenamed"
+
+    dep = EnvironmentVariableRenamed()
+    deprecations_list.append(dep)
+    deprecations[dep.name] = dep
+
+    def cb():
+        dep.show(old_name=old_name, new_name=new_name)
+
+    return cb
+
+
def warn(name, *args, **kwargs):
    if name not in deprecations:
        # this should (hopefully) never happen
@@ -101,6 +131,9 @@ deprecations_list: List[DBTDeprecation] = [
    ConfigDataPathDeprecation(),
    MetricAttributesRenamed(),
    ExposureNameDeprecation(),
+    ConfigLogPathDeprecation(),
+    ConfigTargetPathDeprecation(),
+    CollectFreshnessReturnSignature(),
]

deprecations: Dict[str, DBTDeprecation] = {d.name: d for d in deprecations_list}
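`renamed_env_var` does two things: it registers the deprecation and hands back a callback to fire when the old variable is actually seen. A usage sketch; the variable names here are illustrative, not a real dbt rename:

```python
import os

from dbt.deprecations import renamed_env_var

# hypothetical rename; registration happens at call time
on_old_var = renamed_env_var("DBT_OLD_SETTING", "DBT_NEW_SETTING")

if "DBT_OLD_SETTING" in os.environ:
    on_old_var()  # emits the EnvironmentVariableRenamed deprecation event
```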
@@ -1,9 +1,9 @@
import os
-import hashlib
from typing import List, Optional

from dbt.clients import git, system
-from dbt.config import Project
+from dbt.config.project import PartialProject, Project
+from dbt.config.renderer import PackageRenderer
from dbt.contracts.project import (
    ProjectPackageMetadata,
    GitPackage,
@@ -12,10 +12,11 @@ from dbt.deps.base import PinnedPackage, UnpinnedPackage, get_downloads_path
from dbt.exceptions import ExecutableError, MultipleVersionGitDepsError
from dbt.events.functions import fire_event, warn_or_error
from dbt.events.types import EnsureGitInstalled, DepsUnpinned
+from dbt.utils import md5


def md5sum(s: str):
-    return hashlib.md5(s.encode("latin-1")).hexdigest()
+    return md5(s, "latin-1")

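Judging from this call site, the shared `md5` helper in `dbt.utils` must accept an optional charset argument. A minimal standalone equivalent, assuming that signature (a sketch inferred from the diff, not the verbatim `dbt.utils` source):

```python
import hashlib


def md5(string: str, charset: str = "utf-8") -> str:
    # hex digest of the string encoded with the requested charset
    return hashlib.md5(string.encode(charset)).hexdigest()


# equivalent to the old md5sum body above (pure ASCII, so the bytes match)
assert md5("dbt-labs/dbt-utils", "latin-1") == hashlib.md5(
    b"dbt-labs/dbt-utils"
).hexdigest()
```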
class GitPackageMixin:
@@ -76,13 +77,15 @@ class GitPinnedPackage(GitPackageMixin, PinnedPackage):
            raise
        return os.path.join(get_downloads_path(), dir_)

-    def _fetch_metadata(self, project, renderer) -> ProjectPackageMetadata:
+    def _fetch_metadata(
+        self, project: Project, renderer: PackageRenderer
+    ) -> ProjectPackageMetadata:
        path = self._checkout()

        if (self.revision == "HEAD" or self.revision in ("main", "master")) and self.warn_unpinned:
            warn_or_error(DepsUnpinned(git=self.git))
-        loaded = Project.from_project_root(path, renderer)
-        return ProjectPackageMetadata.from_project(loaded)
+        partial = PartialProject.from_project_root(path)
+        return partial.render_package_metadata(renderer)

    def install(self, project, renderer):
        dest_path = self.get_installation_path(project, renderer)

@@ -8,6 +8,8 @@ from dbt.contracts.project import (
)
from dbt.events.functions import fire_event
from dbt.events.types import DepsCreatingLocalSymlink, DepsSymlinkNotAvailable
+from dbt.config.project import PartialProject, Project
+from dbt.config.renderer import PackageRenderer


class LocalPackageMixin:
@@ -39,9 +41,11 @@ class LocalPinnedPackage(LocalPackageMixin, PinnedPackage):
            project.project_root,
        )

-    def _fetch_metadata(self, project, renderer):
-        loaded = project.from_project_root(self.resolve_path(project), renderer)
-        return ProjectPackageMetadata.from_project(loaded)
+    def _fetch_metadata(
+        self, project: Project, renderer: PackageRenderer
+    ) -> ProjectPackageMetadata:
+        partial = PartialProject.from_project_root(self.resolve_path(project))
+        return partial.render_package_metadata(renderer)

    def install(self, project, renderer):
        src_path = self.resolve_path(project)

@@ -1,7 +1,7 @@
from typing import List

from dbt import semver
-from dbt import flags
+from dbt.flags import get_flags
from dbt.version import get_installed_version
from dbt.clients import registry
from dbt.contracts.project import (
@@ -98,7 +98,7 @@ class RegistryUnpinnedPackage(RegistryPackageMixin, UnpinnedPackage[RegistryPinn
        except VersionsNotCompatibleError as e:
            new_msg = "Version error for package {}: {}".format(self.name, e)
            raise DependencyError(new_msg) from e

+        flags = get_flags()
        should_version_check = bool(flags.VERSION_CHECK)
        dbt_version = get_installed_version()
        compatible_versions = registry.get_compatible_versions(

@@ -1,5 +1,5 @@
from dataclasses import dataclass, field
-from typing import Dict, List, NoReturn, Union, Type, Iterator, Set
+from typing import Dict, List, NoReturn, Union, Type, Iterator, Set, Any

from dbt.exceptions import (
    DuplicateDependencyToRootError,
@@ -8,8 +8,8 @@ from dbt.exceptions import (
    DbtInternalError,
)

-from dbt.config import Project, RuntimeConfig
-from dbt.config.renderer import DbtProjectYamlRenderer
+from dbt.config import Project
+from dbt.config.renderer import PackageRenderer
from dbt.deps.base import BasePackage, PinnedPackage, UnpinnedPackage
from dbt.deps.local import LocalUnpinnedPackage
from dbt.deps.tarball import TarballUnpinnedPackage
@@ -100,35 +100,38 @@ class PackageListing:


def _check_for_duplicate_project_names(
    final_deps: List[PinnedPackage],
-    config: Project,
-    renderer: DbtProjectYamlRenderer,
+    project: Project,
+    renderer: PackageRenderer,
):
    seen: Set[str] = set()
    for package in final_deps:
-        project_name = package.get_project_name(config, renderer)
+        project_name = package.get_project_name(project, renderer)
        if project_name in seen:
            raise DuplicateProjectDependencyError(project_name)
-        elif project_name == config.project_name:
+        elif project_name == project.project_name:
            raise DuplicateDependencyToRootError(project_name)
        seen.add(project_name)


def resolve_packages(
-    packages: List[PackageContract], config: RuntimeConfig
+    packages: List[PackageContract],
+    project: Project,
+    cli_vars: Dict[str, Any],
) -> List[PinnedPackage]:
    pending = PackageListing.from_contracts(packages)
    final = PackageListing()
-    renderer = DbtProjectYamlRenderer(config, config.cli_vars)
+
+    renderer = PackageRenderer(cli_vars)

    while pending:
        next_pending = PackageListing()
        # resolve the dependency in question
        for package in pending:
            final.incorporate(package)
-            target = final[package].resolved().fetch_metadata(config, renderer)
+            target = final[package].resolved().fetch_metadata(project, renderer)
            next_pending.update_from(target.packages)
        pending = next_pending

    resolved = final.resolved()
-    _check_for_duplicate_project_names(resolved, config, renderer)
+    _check_for_duplicate_project_names(resolved, project, renderer)
    return resolved
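The resolver no longer needs a fully built `RuntimeConfig`: callers pass a `Project` plus the raw CLI vars. A sketch of the new call shape; the attribute path to the package contracts is an assumption for illustration:

```python
from dbt.deps.resolver import resolve_packages  # the module in this hunk

# 'project' is a loaded dbt.config.Project; 'project.packages.packages' is
# assumed here to be the List[PackageContract] declared in packages.yml
pinned = resolve_packages(
    packages=project.packages.packages,
    project=project,
    cli_vars={"env": "prod"},  # values that would come from --vars
)
for package in pinned:
    print(package.name)
```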
BIN core/dbt/docs/build/doctrees/environment.pickle (vendored): binary file not shown
BIN core/dbt/docs/build/doctrees/index.doctree (vendored): binary file not shown
2 core/dbt/docs/build/html/.buildinfo (vendored)
@@ -1,4 +1,4 @@
# Sphinx build info version 1
# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
-config: e27d6c1c419f2f0af393858cdf674109
+config: 0d25ef12a43286020bcd8b805064f01c
tags: 645f666f9bcd5a90fca523b33c5a78b7

33 core/dbt/docs/build/html/_sources/index.rst.txt (vendored)
@@ -1,4 +1,37 @@
dbt-core's API documentation
============================
How to invoke dbt commands in python runtime
--------------------------------------------

Right now the best way to invoke a command from a Python runtime is to use the `dbtRunner` we expose

.. code-block:: python

    from dbt.cli.main import dbtRunner
    cli_args = ['run', '--project-dir', 'jaffle_shop']

    # initialize the dbt runner
    dbt = dbtRunner()
    # run the command
    res, success = dbt.invoke(cli_args)

You can also pass pre-constructed objects into dbtRunner, and we will use those objects instead of loading them from disk.

.. code-block:: python

    # preload profile and project
    profile = load_profile(project_dir, {}, 'testing-postgres')
    project = load_project(project_dir, False, profile, {})

    # initialize the runner with pre-loaded profile and project
    dbt = dbtRunner(profile=profile, project=project)
    # run the command, this will use the pre-loaded profile and project instead of loading
    res, success = dbt.invoke(cli_args)


For the full example code, you can refer to `core/dbt/cli/example.py`

API documentation
-----------------

.. dbt_click:: dbt.cli.main:cli

134 core/dbt/docs/build/html/_static/_sphinx_javascript_frameworks_compat.js (vendored, new file)
@@ -0,0 +1,134 @@
/*
 * _sphinx_javascript_frameworks_compat.js
 * ~~~~~~~~~~
 *
 * Compatability shim for jQuery and underscores.js.
 *
 * WILL BE REMOVED IN Sphinx 6.0
 * xref RemovedInSphinx60Warning
 *
 */

/**
 * select a different prefix for underscore
 */
$u = _.noConflict();


/**
 * small helper function to urldecode strings
 *
 * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL
 */
jQuery.urldecode = function(x) {
  if (!x) {
    return x
  }
  return decodeURIComponent(x.replace(/\+/g, ' '));
};

/**
 * small helper function to urlencode strings
 */
jQuery.urlencode = encodeURIComponent;

/**
 * This function returns the parsed url parameters of the
 * current request. Multiple values per key are supported,
 * it will always return arrays of strings for the value parts.
 */
jQuery.getQueryParameters = function(s) {
  if (typeof s === 'undefined')
    s = document.location.search;
  var parts = s.substr(s.indexOf('?') + 1).split('&');
  var result = {};
  for (var i = 0; i < parts.length; i++) {
    var tmp = parts[i].split('=', 2);
    var key = jQuery.urldecode(tmp[0]);
    var value = jQuery.urldecode(tmp[1]);
    if (key in result)
      result[key].push(value);
    else
      result[key] = [value];
  }
  return result;
};

/**
 * highlight a given string on a jquery object by wrapping it in
 * span elements with the given class name.
 */
jQuery.fn.highlightText = function(text, className) {
  function highlight(node, addItems) {
    if (node.nodeType === 3) {
      var val = node.nodeValue;
      var pos = val.toLowerCase().indexOf(text);
      if (pos >= 0 &&
          !jQuery(node.parentNode).hasClass(className) &&
          !jQuery(node.parentNode).hasClass("nohighlight")) {
        var span;
        var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg");
        if (isInSVG) {
          span = document.createElementNS("http://www.w3.org/2000/svg", "tspan");
        } else {
          span = document.createElement("span");
          span.className = className;
        }
        span.appendChild(document.createTextNode(val.substr(pos, text.length)));
        node.parentNode.insertBefore(span, node.parentNode.insertBefore(
          document.createTextNode(val.substr(pos + text.length)),
          node.nextSibling));
        node.nodeValue = val.substr(0, pos);
        if (isInSVG) {
          var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect");
          var bbox = node.parentElement.getBBox();
          rect.x.baseVal.value = bbox.x;
          rect.y.baseVal.value = bbox.y;
          rect.width.baseVal.value = bbox.width;
          rect.height.baseVal.value = bbox.height;
          rect.setAttribute('class', className);
          addItems.push({
            "parent": node.parentNode,
            "target": rect});
        }
      }
    }
    else if (!jQuery(node).is("button, select, textarea")) {
      jQuery.each(node.childNodes, function() {
        highlight(this, addItems);
      });
    }
  }
  var addItems = [];
  var result = this.each(function() {
    highlight(this, addItems);
  });
  for (var i = 0; i < addItems.length; ++i) {
    jQuery(addItems[i].parent).before(addItems[i].target);
  }
  return result;
};

/*
 * backward compatibility for jQuery.browser
 * This will be supported until firefox bug is fixed.
 */
if (!jQuery.browser) {
  jQuery.uaMatch = function(ua) {
    ua = ua.toLowerCase();

    var match = /(chrome)[ \/]([\w.]+)/.exec(ua) ||
      /(webkit)[ \/]([\w.]+)/.exec(ua) ||
      /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) ||
      /(msie) ([\w.]+)/.exec(ua) ||
      ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) ||
      [];

    return {
      browser: match[ 1 ] || "",
      version: match[ 2 ] || "0"
    };
  };
  jQuery.browser = {};
  jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true;
}

3 core/dbt/docs/build/html/_static/basic.css (vendored)
@@ -324,7 +324,6 @@ aside.sidebar {
p.sidebar-title {
    font-weight: bold;
}

nav.contents,
aside.topic,
div.admonition, div.topic, blockquote {
@@ -332,7 +331,6 @@ div.admonition, div.topic, blockquote {
}

/* -- topics ---------------------------------------------------------------- */

nav.contents,
aside.topic,
div.topic {
@@ -608,7 +606,6 @@ ol.simple p,
ul.simple p {
    margin-bottom: 0;
}

aside.footnote > span,
div.citation > span {
    float: left;

10881 core/dbt/docs/build/html/_static/jquery-3.6.0.js (vendored, new file): file diff suppressed because it is too large
2 core/dbt/docs/build/html/_static/jquery.js (vendored, new file): file diff suppressed because one or more lines are too long
@@ -54,7 +54,6 @@ span.linenos.special { color: #000000; background-color: #ffffc0; padding-left:
.highlight .nt { color: #004461; font-weight: bold } /* Name.Tag */
.highlight .nv { color: #000000 } /* Name.Variable */
.highlight .ow { color: #004461; font-weight: bold } /* Operator.Word */
.highlight .pm { color: #000000; font-weight: bold } /* Punctuation.Marker */
.highlight .w { color: #f8f8f8; text-decoration: underline } /* Text.Whitespace */
.highlight .mb { color: #990000 } /* Literal.Number.Bin */
.highlight .mf { color: #990000 } /* Literal.Number.Float */

2042 core/dbt/docs/build/html/_static/underscore-1.13.1.js (vendored, new file): file diff suppressed because it is too large
6 core/dbt/docs/build/html/_static/underscore.js (vendored, new file): file diff suppressed because one or more lines are too long
5 core/dbt/docs/build/html/genindex.html (vendored)
@@ -9,6 +9,9 @@
    <link rel="stylesheet" type="text/css" href="_static/pygments.css" />
    <link rel="stylesheet" type="text/css" href="_static/alabaster.css" />
    <script data-url_root="./" id="documentation_options" src="_static/documentation_options.js"></script>
+    <script src="_static/jquery.js"></script>
+    <script src="_static/underscore.js"></script>
+    <script src="_static/_sphinx_javascript_frameworks_compat.js"></script>
    <script src="_static/doctools.js"></script>
    <script src="_static/sphinx_highlight.js"></script>
    <link rel="index" title="Index" href="#" />
@@ -87,7 +90,7 @@
      ©2022, dbt Labs.

      |
-      Powered by <a href="http://sphinx-doc.org/">Sphinx 6.0.0</a>
+      Powered by <a href="http://sphinx-doc.org/">Sphinx 5.2.3</a>
      & <a href="https://github.com/bitprophet/alabaster">Alabaster 0.7.12</a>

    </div>

570 core/dbt/docs/build/html/index.html (vendored): file diff suppressed because it is too large
5 core/dbt/docs/build/html/search.html (vendored)
@@ -10,6 +10,9 @@
    <link rel="stylesheet" type="text/css" href="_static/alabaster.css" />

    <script data-url_root="./" id="documentation_options" src="_static/documentation_options.js"></script>
+    <script src="_static/jquery.js"></script>
+    <script src="_static/underscore.js"></script>
+    <script src="_static/_sphinx_javascript_frameworks_compat.js"></script>
    <script src="_static/doctools.js"></script>
    <script src="_static/sphinx_highlight.js"></script>
    <script src="_static/searchtools.js"></script>
@@ -106,7 +109,7 @@
      ©2022, dbt Labs.

      |
-      Powered by <a href="http://sphinx-doc.org/">Sphinx 6.0.0</a>
+      Powered by <a href="http://sphinx-doc.org/">Sphinx 5.2.3</a>
      & <a href="https://github.com/bitprophet/alabaster">Alabaster 0.7.12</a>

    </div>

2 core/dbt/docs/build/html/searchindex.js (vendored): file diff suppressed because one or more lines are too long
@@ -44,7 +44,9 @@ def format_params(cmd) -> t.List[nodes.section]:
        type_str = get_type_str(param.type)

        param_section.append(nodes.paragraph(text=f"Type: {type_str}"))
-        param_section.append(nodes.paragraph(text=param.help))
+        help_txt = getattr(param, "help", None)
+        if help_txt is not None:
+            param_section.append(nodes.paragraph(text=help_txt))
        lines.append(param_section)
    return lines

@@ -1,4 +1,37 @@
dbt-core's API documentation
============================
Programmatic invocations
--------------------------------------------

In v1.5, dbt-core added support for programmatic invocations. The intent of this entry point is to provide **exact parity** with CLI functionality, callable from within a Python script or application.

The main entry point is a ``dbtRunner`` class that wraps ``dbt-core``'s CLI and allows you to "invoke" CLI commands as Python methods. Each command returns a ``dbtRunnerResult`` object, which has three attributes:

* ``success`` (bool): Whether the command succeeded.
* ``result``: If the command completed (successfully or with handled errors), its result(s). Return type varies by command.
* ``exception``: If the dbt invocation encountered an unhandled error and did not complete, the exception it encountered.

.. code-block:: python

    from dbt.cli.main import dbtRunner, dbtRunnerResult

    # initialize
    dbt = dbtRunner()

    # create CLI args as a list of strings
    cli_args = ["run", "--select", "tag:my_tag"]

    # run the command
    res: dbtRunnerResult = dbt.invoke(cli_args)

    # inspect the results
    for r in res.result:
        print(f"{r.node.name}: {r.status}")


For more information and examples, consult the documentation: https://docs.getdbt.com/reference/programmatic-invocations
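A natural follow-on, using only the three documented attributes above, is the typical error-handling branch:

```python
res = dbt.invoke(["build", "--select", "state:modified"])

if not res.success:
    if res.exception is not None:
        # the invocation did not complete (usage error, internal error, ...)
        raise res.exception
    # completed, but with handled failures (e.g. failing tests)
    print("dbt build finished with errors")
```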
API documentation
-----------------

.. dbt_click:: dbt.cli.main:cli

@@ -8,13 +8,14 @@ The event module provides types that represent what is happening in dbt in `event format`
When events are processed via `fire_event`, nearly everything is logged. Whether or not the user has enabled the debug flag, all debug messages are still logged to the file. However, some events are particularly time consuming to construct because they return a huge amount of data. Today, the only messages in this category are cache events, and they are only logged if the `--log-cache-events` flag is on. This is important because these messages should not be created unless they are going to be logged, because they cause a noticeable performance degradation. These events use a "fire_event_if" function.

# Adding a New Event
-* Add a new message in types.proto with an EventInfo field first
-* run the protoc compiler to update proto_types.py: ```protoc --python_betterproto_out . types.proto```
-* Add a wrapping class in core/dbt/event/types.py with a Level superclass and the superclass from proto_types.py, plus code and message methods
+* Add a new message in types.proto, and a second message with the same name + "Msg". The "Msg" message should have two fields, an "info" field of EventInfo, and a "data" field referring to the message name without "Msg"
+* run the protoc compiler to update types_pb2.py: make proto_types
+* Add a wrapping class in core/dbt/event/types.py with a Level superclass plus code and message methods
* Add the class to tests/unit/test_events.py

-Note that no attributes can exist in these event classes except for fields defined in the protobuf definitions, because the betterproto metaclass will throw an error. Betterproto provides a to_dict() method to convert the generated classes to a dictionary and from that to json. However some attributes will successfully convert to dictionaries but not to serialized protobufs, so we need to test both output formats.
+We have switched from using betterproto to using google protobuf, because of a lack of support for Struct fields in betterproto.

+The google protobuf interface is janky and very much non-Pythonic. The "generated" classes in types_pb2.py do not resemble regular Python classes. They do not have normal constructors; they can only be constructed empty. They can be "filled" by setting fields individually or using a json_format method like ParseDict. We have wrapped the logging events with a class (in types.py) which allows using a constructor -- keywords only, no positional parameters.

## Required for Every Event
@@ -24,8 +25,7 @@ Note that no attributes can exist in these event classes except for fields defin

Example
```
-@dataclass
-class PartialParsingDeletedExposure(DebugLevel, pt.PartialParsingDeletedExposure):
+class PartialParsingDeletedExposure(DebugLevel):
    def code(self):
        return "I049"

@@ -50,4 +50,6 @@ logger = AdapterLogger("<database name>")

## Compiling types.proto

-After adding a new message in types.proto, in the core/dbt/events directory: ```protoc --python_betterproto_out . types.proto```
+After adding a new message in `types.proto`, either:
+- In the repository root directory: `make proto_types`
+- In the `core/dbt/events` directory: `protoc -I=. --python_out=. types.proto`

@@ -17,38 +17,42 @@ class AdapterLogger:

    def debug(self, msg, *args):
        event = AdapterEventDebug(
-            name=self.name, base_msg=msg, args=args, node_info=get_node_info()
+            name=self.name, base_msg=str(msg), args=list(args), node_info=get_node_info()
        )
        fire_event(event)

    def info(self, msg, *args):
        event = AdapterEventInfo(
-            name=self.name, base_msg=msg, args=args, node_info=get_node_info()
+            name=self.name, base_msg=str(msg), args=list(args), node_info=get_node_info()
        )
        fire_event(event)

    def warning(self, msg, *args):
        event = AdapterEventWarning(
-            name=self.name, base_msg=msg, args=args, node_info=get_node_info()
+            name=self.name, base_msg=str(msg), args=list(args), node_info=get_node_info()
        )
        fire_event(event)

    def error(self, msg, *args):
        event = AdapterEventError(
-            name=self.name, base_msg=msg, args=args, node_info=get_node_info()
+            name=self.name, base_msg=str(msg), args=list(args), node_info=get_node_info()
        )
        fire_event(event)

    # The default exc_info=True is what makes this method different
    def exception(self, msg, *args):
+        exc_info = str(traceback.format_exc())
        event = AdapterEventError(
-            name=self.name, base_msg=msg, args=args, node_info=get_node_info()
+            name=self.name,
+            base_msg=str(msg),
+            args=list(args),
+            node_info=get_node_info(),
+            exc_info=exc_info,
        )
-        event.exc_info = traceback.format_exc()
        fire_event(event)

    def critical(self, msg, *args):
        event = AdapterEventError(
-            name=self.name, base_msg=msg, args=args, node_info=get_node_info()
+            name=self.name, base_msg=str(msg), args=list(args), node_info=get_node_info()
        )
        fire_event(event)
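Because `base_msg` and `args` are now coerced to `str` and `list` before the protobuf message is built, adapter plugins can keep passing arbitrary objects. A short usage sketch, following the `AdapterLogger("<database name>")` pattern from the README above (the adapter name is illustrative):

```python
from dbt.events import AdapterLogger

logger = AdapterLogger("Postgres")

# a non-string argument is coerced internally now
logger.debug("opened connection in %s seconds", 0.42)

try:
    1 / 0
except ZeroDivisionError:
    # exc_info is captured from the active traceback automatically
    logger.exception("query failed")
```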
@@ -1,27 +1,24 @@
from dataclasses import dataclass
from enum import Enum
import os
import threading
-from datetime import datetime
-import dbt.events.proto_types as pt
+from dbt.events import types_pb2
import sys
+from google.protobuf.json_format import ParseDict, MessageToDict, MessageToJson
+from google.protobuf.message import Message
+from dbt.events.helpers import get_json_string_utcnow

if sys.version_info >= (3, 8):
    from typing import Protocol
else:
    from typing_extensions import Protocol


# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# These base types define the _required structure_ for the concrete event #
# types defined in types.py                                               #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #


-class Cache:
-    # Events with this class will only be logged when the `--log-cache-events` flag is passed
-    pass


def get_global_metadata_vars() -> dict:
    from dbt.events.functions import get_metadata_vars

@@ -39,13 +36,6 @@ def get_pid() -> int:
    return os.getpid()


-# preformatted time stamp
-def get_ts_rfc3339() -> str:
-    ts = datetime.utcnow()
-    ts_rfc3339 = ts.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
-    return ts_rfc3339


# in theory threads can change so we don't cache them.
def get_thread_name() -> str:
    return threading.current_thread().name
@@ -61,24 +51,58 @@ class EventLevel(str, Enum):
    ERROR = "error"


@dataclass
class BaseEvent:
    """BaseEvent for proto message generated python events"""

-    # def __post_init__(self):
-    #     super().__post_init__()
-    #     if not self.info.level:
-    #         self.info.level = self.level_tag()
-    #     assert self.info.level in ["info", "warn", "error", "debug", "test"]
-    #     if not hasattr(self.info, "msg") or not self.info.msg:
-    #         self.info.msg = self.message()
-    #     self.info.invocation_id = get_invocation_id()
-    #     self.info.extra = get_global_metadata_vars()
-    #     self.info.ts = datetime.utcnow()
-    #     self.info.pid = get_pid()
-    #     self.info.thread = get_thread_name()
-    #     self.info.code = self.code()
-    #     self.info.name = type(self).__name__
+    def __init__(self, *args, **kwargs):
+        class_name = type(self).__name__
+        msg_cls = getattr(types_pb2, class_name)
+        if class_name == "Formatting" and len(args) > 0:
+            kwargs["msg"] = args[0]
+            args = ()
+        assert (
+            len(args) == 0
+        ), f"[{class_name}] Don't use positional arguments when constructing logging events"
+        if "base_msg" in kwargs:
+            kwargs["base_msg"] = str(kwargs["base_msg"])
+        if "msg" in kwargs:
+            kwargs["msg"] = str(kwargs["msg"])
+        try:
+            self.pb_msg = ParseDict(kwargs, msg_cls())
+        except Exception:
+            # Imports need to be here to avoid circular imports
+            from dbt.events.types import Note
+            from dbt.events.functions import fire_event
+
+            error_msg = f"[{class_name}]: Unable to parse dict {kwargs}"
+            # If we're testing throw an error so that we notice failures
+            if "pytest" in sys.modules:
+                raise Exception(error_msg)
+            else:
+                fire_event(Note(msg=error_msg), level=EventLevel.WARN)
+                self.pb_msg = msg_cls()
+
+    def __setattr__(self, key, value):
+        if key == "pb_msg":
+            super().__setattr__(key, value)
+        else:
+            super().__getattribute__("pb_msg").__setattr__(key, value)
+
+    def __getattr__(self, key):
+        if key == "pb_msg":
+            return super().__getattribute__(key)
+        else:
+            return super().__getattribute__("pb_msg").__getattribute__(key)
+
+    def to_dict(self):
+        return MessageToDict(
+            self.pb_msg, preserving_proto_field_name=True, including_default_value_fields=True
+        )
+
+    def to_json(self):
+        return MessageToJson(
+            self.pb_msg, preserving_proto_field_name=True, including_default_value_fields=True
+        )

    def level_tag(self) -> EventLevel:
        return EventLevel.DEBUG
@@ -90,42 +114,48 @@ class BaseEvent:
        raise Exception("code() not implemented for event")


+class EventInfo(Protocol):
+    level: str
+    name: str
+    ts: str
+    code: str


class EventMsg(Protocol):
-    info: pt.EventInfo
-    data: BaseEvent
+    info: EventInfo
+    data: Message


def msg_from_base_event(event: BaseEvent, level: EventLevel = None):

    msg_class_name = f"{type(event).__name__}Msg"
-    msg_cls = getattr(pt, msg_class_name)
+    msg_cls = getattr(types_pb2, msg_class_name)

    # level in EventInfo must be a string, not an EventLevel
    msg_level: str = level.value if level else event.level_tag().value
    assert msg_level is not None
-    event_info = pt.EventInfo(
-        level=msg_level,
-        msg=event.message(),
-        invocation_id=get_invocation_id(),
-        extra=get_global_metadata_vars(),
-        ts=datetime.utcnow(),
-        pid=get_pid(),
-        thread=get_thread_name(),
-        code=event.code(),
-        name=type(event).__name__,
-    )
-    new_event = msg_cls(data=event, info=event_info)
+    event_info = {
+        "level": msg_level,
+        "msg": event.message(),
+        "invocation_id": get_invocation_id(),
+        "extra": get_global_metadata_vars(),
+        "ts": get_json_string_utcnow(),
+        "pid": get_pid(),
+        "thread": get_thread_name(),
+        "code": event.code(),
+        "name": type(event).__name__,
+    }
+    new_event = ParseDict({"info": event_info}, msg_cls())
+    new_event.data.CopyFrom(event.pb_msg)
    return new_event
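Putting the two pieces together: a concrete event is constructed with keyword arguments only (per the assertion in `BaseEvent.__init__`) and then wrapped in its `...Msg` envelope by `msg_from_base_event`. A sketch; `MainReportVersion` is used as a representative event type and its exact fields are an assumption:

```python
from dbt.events.base_types import msg_from_base_event, EventLevel
from dbt.events.types import MainReportVersion  # representative event; fields assumed

event = MainReportVersion(version="1.5.0", log_version=3)  # kwargs only
msg = msg_from_base_event(event, level=EventLevel.INFO)

print(msg.info.code, msg.info.msg)  # info fields populated by msg_from_base_event
print(event.to_dict())              # plain dict via MessageToDict
```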
|
||||
# DynamicLevel requires that the level be supplied on the
|
||||
# event construction call using the "info" function from functions.py
|
||||
@dataclass # type: ignore[misc]
|
||||
class DynamicLevel(BaseEvent):
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class TestLevel(BaseEvent):
|
||||
__test__ = False
|
||||
|
||||
@@ -133,54 +163,21 @@ class TestLevel(BaseEvent):
|
||||
return EventLevel.TEST
|
||||
|
||||
|
||||
@dataclass # type: ignore[misc]
|
||||
class DebugLevel(BaseEvent):
|
||||
def level_tag(self) -> EventLevel:
|
||||
return EventLevel.DEBUG
|
||||
|
||||
|
||||
@dataclass # type: ignore[misc]
|
||||
class InfoLevel(BaseEvent):
|
||||
def level_tag(self) -> EventLevel:
|
||||
return EventLevel.INFO
|
||||
|
||||
|
||||
@dataclass # type: ignore[misc]
|
||||
class WarnLevel(BaseEvent):
|
||||
def level_tag(self) -> EventLevel:
|
||||
return EventLevel.WARN
|
||||
|
||||
|
||||
@dataclass # type: ignore[misc]
|
||||
class ErrorLevel(BaseEvent):
|
||||
def level_tag(self) -> EventLevel:
|
||||
return EventLevel.ERROR
|
||||
|
||||
|
||||
# Included to ensure classes with str-type message members are initialized correctly.
|
||||
@dataclass # type: ignore[misc]
|
||||
class AdapterEventStringFunctor:
|
||||
def __post_init__(self):
|
||||
super().__post_init__()
|
||||
if not isinstance(self.base_msg, str):
|
||||
self.base_msg = str(self.base_msg)
|
||||
|
||||
|
||||
@dataclass # type: ignore[misc]
|
||||
class EventStringFunctor:
|
||||
def __post_init__(self):
|
||||
super().__post_init__()
|
||||
if not isinstance(self.msg, str):
|
||||
self.msg = str(self.msg)
|
||||
|
||||
|
||||
# prevents an event from going to the file
|
||||
# This should rarely be used in core code. It is currently
|
||||
# only used in integration tests and for the 'clean' command.
|
||||
class NoFile:
|
||||
pass
|
||||
|
||||
|
||||
# prevents an event from going to stdout
|
||||
class NoStdOut:
|
||||
pass
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.