Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-21 17:21:28 +00:00

Compare commits: update-ind ... er/test-sy (258 commits)
Commits in this compare (SHA1):

3d14a0bb3f 31881d2a3b 1dcdcd2f52 3de3b827bf 8a8857a85c e4d5a4e777
b414ef2cc5 57e279cc1b 2eb1a5c3ea dcc9a0ca29 892c545985 a8702b8374
1592987de8 710600546a 0bf38ce294 459d156e85 95c090bed0 f2222d2621
97ffc37405 bf18b59845 88e953e8aa 6076cf7114 a1757934ef 6c61cb7f7a
4b1f1c4029 7df04b0fe4 662101590d fc6167a2ee 983cbb4f28 c9582c2323
03fdb4c157 afe25a99fe e32b8a90ac 1472b86ee2 ff6745c795 fdfe03d561
1b7d9b5704 c3d87b89fb 0f084e16ca 3464be7f70 407f6caa1c ad575ec699
f582ac2488 f5f0735d00 3abf575fa6 a42303c3af 6fccfe84ea fd6ec71dab
ae957599e1 f080346227 2a75dd4683 945539e3ae 84230ce333 35c09203ad
1625eb059a 2c43af897d 6e1f64f8b4 e9a2b548cb 89caa33fb4 30b8a92e38
b95f7a7f2c e451a371e6 81067d4fc4 3198ce4809 0c51985c83 e26af57989
bdf28d7eff 289d2dd932 8a17a0d7e7 8c6bec4fb5 7f5abdc565 f714e84282
7f92c6e003 8de0229a04 dd77210756 8df5c96f3d 6b5db1796f 3224589fe7
b71ceb3166 4d4b05effc 316ecfca28 d07bfda9df 8ae689c674 bdb79e8626
f7b7935a97 3d96b4e36c 7920b0e71d a0674db840 ba6c7baf1d 8be063502b
78c05718c5 d18f50bbb8 ffa75ca9ff 8f847167fa cd6bb9e782 ef9abe6c06
40c350ff21 c7d8693f70 6743e32574 f6cdacc61e 5db0b81da1 fc8eb820aa
fc83f5edfa 8248d1eb53 6b9c1da1ae 7940ad5c78 3ec8fa79bd 396cf2d683
87b1143a62 75a09621cd 5e9f1b515f 25a68a990c a86e2b4ffc 94917432f9
d1857b39ca 2ff3f20863 5e3d418264 5d32aa8b62 d8b1bf53f7 1076352293
1fe9c1bbfe 41e4836c0f b590045b9f 1fd4d2eae6 ac66f91351 359a2c0cc5
bbdb98fa5d a8d4ba2b4a 09e973d24a 730e40a867 a1e4753020 3ac20ce7a8
aa23af98e5 46da967115 db694731c9 7016cd3085 9ca10fbfd9 3308a4365e
f8bfd32ed6 3e437a6734 9e633f6178 d182d06644 054c6fde37 4c326e40b5
8fe5ea1ee7 16f5023f4d c6b8f7e595 77aeb3ea68 1e20772d33 8ce2c46a2f
aeaaedcaa1 6c111f2e31 139b9ac54f cc8541c05f ab500a9709 1d3d315249
b35ad46e3f c28cb92af5 b56d96df5e 37d382c8e7 9b7f4ff842 555ff8091f
98fddcf54f d652359c61 f7d21e012e e1fa461186 1153597970 09f9febc25
22181409f6 f25a474f75 3c55806203 bba020fcc0 84eb0ff672 3695698e22
9ca1bc5b4c 5f66678f6d 63262e93cb 374412af53 47848b8ea8 3d09872a56
dfa7d06526 7f57dd5a30 56bfbeaedd 1dd26e79af 86223609dd 21a46332f1
ff2726c3b5 014444dc18 25c2042dc9 0a160fc27a c598741262 f9c2b9398f
cab6dabbc7 e1621ebc54 cd90d4493c 560d151dcd 229c537748 79ad0a3243
c668846404 c4958de166 33161a3035 471b816dcd bef2d20c21 2a26fabfdf
4c7d922a6d b03291548a a7af3b3831 6e4564ab05 1aeff2c58f 601fee0d5f
88b8b10df1 4ea0e1007c a309283a7c b10fa79ae8 37e2725038 37fd299ad0
a94027acea b59c9075e2 c215697a02 d936a630c1 11ee2b9c42 64c59476f4
2bae05b8ed ca163c3d6e 9a796aa202 51ff85bb2d d389ff1450 4415731da4
0fdc83af9d 71a8a41104 da19d7ba9f 1475abb1cb 27b2f965dd 100352d6b4
8ee8b2560a d4a6482091 8639290108 e699f5d042 e977b3eee5 c5be8e2a93
bff116dbed 4df120e40e e53420c1d0 88ccc8a447 a98059967d b680c7ae95
a677abd5e8 8c850b58cb a34267f54b 155482851a 81386a7a43 d8e38c1a1d
3e37d77780 e0783c2922 c2d4643f9d 84456f50f6 fb10bb4aea 366d4ad04a
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.9.0a1
+current_version = 1.10.0a1
 parse = (?P<major>[\d]+) # major version number
     \.(?P<minor>[\d]+) # minor version number
     \.(?P<patch>[\d]+) # patch version number
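Aside: the `parse` pattern above is what bumpversion uses to split the version string into named parts. A minimal sketch of that matching in Python, abbreviated to the three groups visible in this hunk (the real config may define additional pre-release parts):

```python
import re

# Named groups mirroring the major/minor/patch pieces of the parse pattern above.
PARSE = re.compile(r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)")

match = PARSE.match("1.10.0a1")  # the new current_version
assert match is not None
print(match.groupdict())  # {'major': '1', 'minor': '10', 'patch': '0'}
```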
(deleted changelog file)
@@ -1,6 +0,0 @@
-kind: Dependencies
-body: Remove logbook dependency
-time: 2024-05-09T09:37:17.745129-05:00
-custom:
-  Author: emmyoop
-  Issue: "8027"

.changes/unreleased/Dependencies-20241112-163815.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Dependencies
+body: Upgrading dbt-semantic-interfaces to 0.8.3 for custom grain support in offset windows
+time: 2024-11-12T16:38:15.351519-05:00
+custom:
+  Author: WilliamDee
+  Issue: None

.changes/unreleased/Dependencies-20241118-001113.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: "Dependencies"
+body: "Bump codecov/codecov-action from 4 to 5"
+time: 2024-11-18T00:11:13.00000Z
+custom:
+  Author: dependabot[bot]
+  Issue: 11009

(deleted changelog file)
@@ -1,6 +0,0 @@
-kind: Features
-body: serialize inferred primary key
-time: 2024-05-06T17:56:42.757673-05:00
-custom:
-  Author: dave-connors-3
-  Issue: "9824"

(deleted changelog file)
@@ -1,6 +0,0 @@
-kind: Features
-body: 'Add unit_test: selection method'
-time: 2024-05-07T16:27:17.047585-04:00
-custom:
-  Author: michelleark
-  Issue: "10053"

.changes/unreleased/Features-20241104-120053.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Features
+body: Add new hard_deletes="new_record" mode for snapshots.
+time: 2024-11-04T12:00:53.95191-05:00
+custom:
+  Author: peterallenwebb
+  Issue: "10235"

.changes/unreleased/Features-20241121-125630.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Features
+body: Add `batch` context object to model jinja context
+time: 2024-11-21T12:56:30.715473-06:00
+custom:
+  Author: QMalcolm
+  Issue: "11025"

.changes/unreleased/Features-20241206-195308.yaml (new file, +7)
@@ -0,0 +1,7 @@
+kind: Features
+body: Ensure pre/post hooks only run on first/last batch respectively for microbatch
+  model batches
+time: 2024-12-06T19:53:08.928793-06:00
+custom:
+  Author: MichelleArk QMalcolm
+  Issue: 11094 11104
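The hook behavior described in that last entry amounts to treating the batch loop as one logical model run: pre-hooks fire once before the first batch and post-hooks once after the last. A hypothetical sketch of the idea (names are illustrative, not dbt-core's internals):

```python
# Illustrative only: pre-hooks run before the first batch, post-hooks after the last.
def run_batches(batches, pre_hooks, post_hooks, run_batch):
    last = len(batches) - 1
    for i, batch in enumerate(batches):
        if i == 0:
            for hook in pre_hooks:
                hook()
        run_batch(batch)
        if i == last:
            for hook in post_hooks:
                hook()

run_batches(
    batches=["2024-01-01", "2024-01-02", "2024-01-03"],
    pre_hooks=[lambda: print("pre-hook (first batch only)")],
    post_hooks=[lambda: print("post-hook (last batch only)")],
    run_batch=lambda b: print(f"run batch {b}"),
)
```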
.changes/unreleased/Features-20241216-095435.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Features
+body: Support "tags" in Saved Queries
+time: 2024-12-16T09:54:35.327675-08:00
+custom:
+  Author: theyostalservice
+  Issue: "11155"

.changes/unreleased/Features-20241217-171631.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Features
+body: Calculate source freshness via a SQL query
+time: 2024-12-17T17:16:31.841076-08:00
+custom:
+  Author: ChenyuLInx
+  Issue: "8797"

.changes/unreleased/Features-20241218-170729.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Features
+body: Add freshness definition on model for adaptive job
+time: 2024-12-18T17:07:29.55754-08:00
+custom:
+  Author: ChenyuLInx
+  Issue: "11123"

.changes/unreleased/Features-20250106-132829.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Features
+body: Meta config for dimensions measures and entities
+time: 2025-01-06T13:28:29.176439-06:00
+custom:
+  Author: DevonFulcher
+  Issue: None

(deleted changelog file)
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Remove unused check_new method
-time: 2023-06-01T20:41:57.556342+02:00
-custom:
-  Author: kevinneville
-  Issue: "7586"

(deleted changelog file)
@@ -1,7 +0,0 @@
-kind: Fixes
-body: 'Restore previous behavior for --favor-state: only favor defer_relation if not
-  selected in current command"'
-time: 2024-05-08T15:11:27.510912+02:00
-custom:
-  Author: jtcohen6
-  Issue: "10107"

(deleted changelog file)
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Unit test fixture (csv) returns null for empty value
-time: 2024-05-09T09:14:11.772709-04:00
-custom:
-  Author: michelleark
-  Issue: "9881"

(deleted changelog file)
@@ -1,7 +0,0 @@
-kind: Fixes
-body: Fix json format log and --quiet for ls and jinja print by converting print call
-  to fire events
-time: 2024-05-16T15:39:13.896723-07:00
-custom:
-  Author: ChenyuLInx
-  Issue: "8756"

(deleted changelog file)
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Add resource type to saved_query
-time: 2024-05-16T22:35:10.287514-07:00
-custom:
-  Author: ChenyuLInx
-  Issue: "10168"

.changes/unreleased/Fixes-20240822-122132.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: dbt retry does not respect --threads
+time: 2024-08-22T12:21:32.358066+05:30
+custom:
+  Author: donjin-master
+  Issue: "10584"

.changes/unreleased/Fixes-20241025-104339.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: update adapter version messages
+time: 2024-10-25T10:43:39.274723-05:00
+custom:
+  Author: dave-connors-3
+  Issue: "10230"

.changes/unreleased/Fixes-20241121-181739.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Catch DbtRuntimeError for hooks
+time: 2024-11-21T18:17:39.753235Z
+custom:
+  Author: aranke
+  Issue: "11012"

.changes/unreleased/Fixes-20241128-162936.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Access DBUG flag more consistently with the rest of the codebase in ManifestLoader
+time: 2024-11-28T16:29:36.236729+01:00
+custom:
+  Author: Threynaud
+  Issue: "11068"

.changes/unreleased/Fixes-20241204-100429.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Improve the performance characteristics of add_test_edges()
+time: 2024-12-04T10:04:29.096231-05:00
+custom:
+  Author: peterallenwebb
+  Issue: "10950"

.changes/unreleased/Fixes-20241205-145307.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Implement partial parsing for singular data test configs in yaml files
+time: 2024-12-05T14:53:07.295536-05:00
+custom:
+  Author: gshank
+  Issue: "10801"

.changes/unreleased/Fixes-20241209-113806.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix debug log messages for microbatch batch execution information
+time: 2024-12-09T11:38:06.972743-06:00
+custom:
+  Author: MichelleArk QMalcolm
+  Issue: "11111"

.changes/unreleased/Fixes-20241209-133317.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix running of extra "last" batch when there is only one batch
+time: 2024-12-09T13:33:17.253326-06:00
+custom:
+  Author: QMalcolm
+  Issue: "11112"

.changes/unreleased/Fixes-20241209-150711.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix interpretation of `PartialSuccess` to result in non-zero exit code
+time: 2024-12-09T15:07:11.391313-06:00
+custom:
+  Author: QMalcolm
+  Issue: "11114"
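The `PartialSuccess` fix above concerns how a run result maps to a process exit code: a run where some batches failed should not exit 0. A hypothetical sketch of that mapping (the enum and function names here are illustrative, not dbt-core's actual types):

```python
from enum import Enum

class RunOutcome(Enum):  # illustrative stand-in, not dbt-core's status type
    SUCCESS = "success"
    PARTIAL_SUCCESS = "partial success"
    ERROR = "error"

def exit_code(outcome: RunOutcome) -> int:
    # After the fix, only a full success maps to exit code 0.
    return 0 if outcome is RunOutcome.SUCCESS else 1

assert exit_code(RunOutcome.PARTIAL_SUCCESS) == 1
```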
.changes/unreleased/Fixes-20241212-113611.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Warn about invalid usages of `concurrent_batches` config
+time: 2024-12-12T11:36:11.451962-06:00
+custom:
+  Author: QMalcolm
+  Issue: "11122"

.changes/unreleased/Fixes-20241216-134645.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Error writing generic test at run time
+time: 2024-12-16T13:46:45.936573-05:00
+custom:
+  Author: gshank
+  Issue: "11110"

.changes/unreleased/Fixes-20241217-154848.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Run check_modified_contract for state:modified
+time: 2024-12-17T15:48:48.053054-05:00
+custom:
+  Author: gshank
+  Issue: "11034"

.changes/unreleased/Fixes-20241218-112640.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix unrendered_config for tests from dbt_project.yml
+time: 2024-12-18T11:26:40.270022-05:00
+custom:
+  Author: gshank
+  Issue: "11146"

.changes/unreleased/Fixes-20250102-140543.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Make partial parsing reparse referencing nodes of newly versioned models.
+time: 2025-01-02T14:05:43.629959-05:00
+custom:
+  Author: d-cole
+  Issue: "8872"

.changes/unreleased/Fixes-20250107-173719.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Ensure warning about microbatch lacking filter inputs is always fired
+time: 2025-01-07T17:37:19.373261-06:00
+custom:
+  Author: QMalcolm
+  Issue: "11159"

.changes/unreleased/Fixes-20250109-123309.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix microbatch dbt list --output json
+time: 2025-01-09T12:33:09.958795+01:00
+custom:
+  Author: internetcoffeephone
+  Issue: 10556 11098

.changes/unreleased/Fixes-20250110-155824.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix for custom fields in generic test config for not_null and unique tests
+time: 2025-01-10T15:58:24.479245-05:00
+custom:
+  Author: gshank
+  Issue: "11208"

(deleted changelog file)
@@ -1,6 +0,0 @@
-kind: Security
-body: Explicitly bind to localhost in docs serve
-time: 2024-05-22T09:45:40.748185-04:00
-custom:
-  Author: ChenyuLInx michelleark
-  Issue: "10209"

(deleted changelog file)
@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Clear error message for Private package in dbt-core
-time: 2024-05-02T15:44:30.713097-07:00
-custom:
-  Author: ChenyuLInx
-  Issue: "10083"

(deleted changelog file)
@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Enable use of context in serialization
-time: 2024-05-06T14:55:11.1812-04:00
-custom:
-  Author: gshank
-  Issue: "10093"

(deleted changelog file)
@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Make RSS high water mark measurement more accurate on Linux
-time: 2024-05-19T15:59:46.700842315-04:00
-custom:
-  Author: peterallenwebb
-  Issue: "10177"

.changes/unreleased/Under the Hood-20241202-164715.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Create a no-op exposure runner
+time: 2024-12-02T16:47:15.766574Z
+custom:
+  Author: aranke
+  Issue: ' '

.changes/unreleased/Under the Hood-20241205-143144.yaml (new file, +7)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Improve selection peformance by optimizing the select_children() and select_parents()
+  functions.
+time: 2024-12-05T14:31:44.584216-05:00
+custom:
+  Author: peterallenwebb
+  Issue: "11099"

.changes/unreleased/Under the Hood-20250107-123955.yaml (new file, +7)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Change exception type from DbtInternalException to UndefinedMacroError when
+  macro not found in 'run operation' command
+time: 2025-01-07T12:39:55.234321-05:00
+custom:
+  Author: michelleark
+  Issue: "11192"

.changes/unreleased/Under the Hood-20250107-205838.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Create LogNodeResult event
+time: 2025-01-07T20:58:38.821036Z
+custom:
+  Author: aranke
+  Issue: ' '

.changes/unreleased/Under the Hood-20250110-202057.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Fix error counts for exposures
+time: 2025-01-10T20:20:57.01632Z
+custom:
+  Author: aranke
+  Issue: ' '

.changes/unreleased/Under the Hood-20250117-152215.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Misc fixes for group info in logging
+time: 2025-01-17T15:22:15.497485Z
+custom:
+  Author: aranke
+  Issue: '11218'
.flake8 (1 change)
@@ -7,6 +7,7 @@ ignore =
     W503 # makes Flake8 work like black
     W504
     E203 # makes Flake8 work like black
+    E704 # makes Flake8 work like black
    E741
     E501 # long line checking is done in black
 exclude = test/
.github/ISSUE_TEMPLATE/code-docs.yml (new file, +18)
@@ -0,0 +1,18 @@
+name: 📄 Code docs
+description: Report an issue for markdown files within this repo, such as README, ARCHITECTURE, etc.
+title: "[Code docs] <title>"
+labels: ["triage"]
+body:
+  - type: markdown
+    attributes:
+      value: |
+        Thanks for taking the time to fill out this code docs issue!
+  - type: textarea
+    attributes:
+      label: Please describe the issue and your proposals.
+      description: |
+        Links? References? Anything that will give us more context about the issue you are encountering!
+
+        Tip: You can attach images by clicking this area to highlight it and then dragging files in.
+    validations:
+      required: false
.github/ISSUE_TEMPLATE/config.yml (3 changes)
@@ -1,5 +1,8 @@
 blank_issues_enabled: false
 contact_links:
+  - name: Documentation
+    url: https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose
+    about: Problems and issues with dbt product documentation hosted on docs.getdbt.com. Issues for markdown files within this repo, such as README, should be opened using the "Code docs" template.
   - name: Ask the community for help
     url: https://github.com/dbt-labs/docs.getdbt.com/discussions
     about: Need help troubleshooting? Check out our guide on how to ask
.github/actions/setup-postgres-linux/action.yml (11 changes)
@@ -5,6 +5,15 @@ runs:
   steps:
     - shell: bash
       run: |
-        sudo systemctl start postgresql.service
+        sudo apt-get --purge remove postgresql postgresql-*
+        sudo apt update -y
+        sudo apt install gnupg2 wget vim -y
+        sudo sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
+        curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc|sudo gpg --dearmor -o /etc/apt/trusted.gpg.d/postgresql.gpg
+        sudo apt update -y
+        sudo apt install postgresql-16
+        sudo apt-get -y install postgresql postgresql-contrib
+        sudo systemctl start postgresql
+        sudo systemctl enable postgresql
         pg_isready
         sudo -u postgres bash ${{ github.action_path }}/setup_db.sh
@@ -5,7 +5,9 @@ runs:
   steps:
     - shell: bash
      run: |
-        brew services start postgresql
+        brew install postgresql@16
+        brew link postgresql@16 --force
+        brew services start postgresql@16
         echo "Check PostgreSQL service is running"
         i=10
         COMMAND='pg_isready'
@@ -5,8 +5,22 @@ runs:
   steps:
     - shell: pwsh
       run: |
-        $pgService = Get-Service -Name postgresql*
+        Write-Host -Object "Installing PostgreSQL 16 as windows service..."
+        $installerArgs = @("--install_runtimes 0", "--superpassword root", "--enable_acledit 1", "--unattendedmodeui none", "--mode unattended")
+        $filePath = Invoke-DownloadWithRetry -Url "https://get.enterprisedb.com/postgresql/postgresql-16.1-1-windows-x64.exe" -Path "$env:PGROOT/postgresql-16.1-1-windows-x64.exe"
+        Start-Process -FilePath $filePath -ArgumentList $installerArgs -Wait -PassThru
+
+        Write-Host -Object "Validating PostgreSQL 16 Install..."
+        Get-Service -Name postgresql*
+        $pgReady = Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru
+        $exitCode = $pgReady.ExitCode
+        if ($exitCode -ne 0) {
+          Write-Host -Object "PostgreSQL is not ready. Exitcode: $exitCode"
+          exit $exitCode
+        }
+
+        Write-Host -Object "Starting PostgreSQL 16 Service..."
+        $pgService = Get-Service -Name postgresql-x64-16
         Set-Service -InputObject $pgService -Status running -StartupType automatic
-        Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru
         $env:Path += ";$env:PGBIN"
         bash ${{ github.action_path }}/setup_db.sh
.github/pull_request_template.md (14 changes)
@@ -1,7 +1,7 @@
-resolves #
+Resolves #

 <!---
-  Include the number of the issue addressed by this PR above if applicable.
+  Include the number of the issue addressed by this PR above, if applicable.
   PRs for code changes without an associated issue *will not be merged*.
   See CONTRIBUTING.md for more information.

@@ -26,8 +26,8 @@ resolves #

 ### Checklist

-- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me
+- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me.
-- [ ] I have run this code in development and it appears to resolve the stated issue
+- [ ] I have run this code in development, and it appears to resolve the stated issue.
-- [ ] This PR includes tests, or tests are not required/relevant for this PR
+- [ ] This PR includes tests, or tests are not required or relevant for this PR.
-- [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc) or this PR has already received feedback and approval from Product or DX
+- [ ] This PR has no interface changes (e.g., macros, CLI, logs, JSON artifacts, config files, adapter interface, etc.) or this PR has already received feedback and approval from Product or DX.
-- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions
+- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions.
.github/workflows/auto-respond-bug-reports.yml (new file, +50)
@@ -0,0 +1,50 @@
+# **what?**
+# Check if the an issue is opened near or during an extended holiday period.
+# If so, post an automatically-generated comment about the holiday for bug reports.
+# Also provide specific information to customers of dbt Cloud.
+
+# **why?**
+# Explain why responses will be delayed during our holiday period.
+
+# **when?**
+# This will run when new issues are opened.
+
+name: Auto-Respond to Bug Reports During Holiday Period
+
+on:
+  issues:
+    types:
+      - opened
+
+permissions:
+  contents: read
+  issues: write
+
+jobs:
+  auto-response:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check if current date is within holiday period
+        id: date-check
+        run: |
+          current_date=$(date -u +"%Y-%m-%d")
+          start_date="2024-12-23"
+          end_date="2025-01-05"
+
+          if [[ "$current_date" < "$start_date" || "$current_date" > "$end_date" ]]; then
+            echo "outside_holiday=true" >> $GITHUB_ENV
+          else
+            echo "outside_holiday=false" >> $GITHUB_ENV
+          fi
+
+      - name: Post comment
+        if: ${{ env.outside_holiday == 'false' && contains(github.event.issue.labels.*.name, 'bug') }}
+        run: |
+          gh issue comment ${{ github.event.issue.number }} --repo ${{ github.repository }} --body "Thank you for your bug report! Our team is will be out of the office for [Christmas and our Global Week of Rest](https://handbook.getdbt.com/docs/time_off#2024-us-holidays), from December 25, 2024, through January 3, 2025.

          We will review your issue as soon as possible after returning.
          Thank you for your understanding, and happy holidays! 🎄🎉

          If you are a customer of dbt Cloud, please contact our Customer Support team via the dbt Cloud web interface or email **support@dbtlabs.com**."
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
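One detail worth noting in the workflow above: the bash `[[ "$a" < "$b" ]]` comparisons are plain string comparisons, which are safe here because ISO-8601 dates sort lexicographically in the same order as chronologically. The same gate expressed in Python, with the dates copied from the workflow:

```python
from datetime import date, datetime, timezone

HOLIDAY_START = date(2024, 12, 23)
HOLIDAY_END = date(2025, 1, 5)

def outside_holiday(today: date) -> bool:
    # Mirrors the workflow: before the start or after the end means "outside".
    return today < HOLIDAY_START or today > HOLIDAY_END

print(outside_holiday(datetime.now(timezone.utc).date()))
print(outside_holiday(date(2024, 12, 25)))  # False: inside the window
```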
.github/workflows/check-artifact-changes.yml (2 changes)
@@ -32,7 +32,7 @@ jobs:
         run: |
           echo "CI failure: Artifact changes checked in core/dbt/artifacts directory."
           echo "Files changed: ${{ steps.check_artifact_changes.outputs.artifacts_changed_files }}"
-          echo "To bypass this check, confirm that the change is not breaking (https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/artifacts/README.md#breaking-changes) and add the 'artifact_minor_upgrade' label to the PR."
+          echo "To bypass this check, confirm that the change is not breaking (https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/artifacts/README.md#breaking-changes) and add the 'artifact_minor_upgrade' label to the PR. Modifications and additions to all fields require updates to https://github.com/dbt-labs/dbt-jsonschema."
           exit 1

       - name: CI check passed
.github/workflows/docs-issue.yml (4 changes)
@@ -36,6 +36,6 @@ jobs:
     uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
     with:
       issue_repository: "dbt-labs/docs.getdbt.com"
-      issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} Issue #${{ github.event.issue.number }}"
+      issue_title: "[Core] Docs Changes Needed from ${{ github.event.repository.name }} Issue #${{ github.event.issue.number }}"
-      issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated."
+      issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated.\n Originating from this issue: https://github.com/dbt-labs/dbt-core/issues/${{ github.event.issue.number }}"
     secrets: inherit
.github/workflows/main.yml (15 changes)
@@ -52,13 +52,14 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: '3.8'
+          python-version: '3.9'

       - name: Install python dependencies
         run: |
           python -m pip install --user --upgrade pip
           python -m pip --version
           make dev
+          make dev_req
           mypy --version
           dbt --version

@@ -74,7 +75,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: [ "3.8", "3.9", "3.10", "3.11", "3.12" ]
+        python-version: [ "3.9", "3.10", "3.11", "3.12" ]

     env:
       TOXENV: "unit"
@@ -111,7 +112,7 @@ jobs:

       - name: Upload Unit Test Coverage to Codecov
         if: ${{ matrix.python-version == '3.11' }}
-        uses: codecov/codecov-action@v4
+        uses: codecov/codecov-action@v5
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
           flags: unit
@@ -139,7 +140,7 @@ jobs:
       - name: generate include
         id: generate-include
         run: |
-          INCLUDE=('"python-version":"3.8","os":"windows-latest"' '"python-version":"3.8","os":"macos-12"' )
+          INCLUDE=('"python-version":"3.9","os":"windows-latest"' '"python-version":"3.9","os":"macos-14"' )
           INCLUDE_GROUPS="["
           for include in ${INCLUDE[@]}; do
             for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
@@ -161,7 +162,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: [ "3.8", "3.9", "3.10", "3.11", "3.12" ]
+        python-version: [ "3.9", "3.10", "3.11", "3.12" ]
         os: [ubuntu-20.04]
         split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
         include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
@@ -229,7 +230,7 @@ jobs:

       - name: Upload Integration Test Coverage to Codecov
         if: ${{ matrix.python-version == '3.11' }}
-        uses: codecov/codecov-action@v4
+        uses: codecov/codecov-action@v5
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
           flags: integration
@@ -263,7 +264,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: '3.8'
+          python-version: '3.9'

       - name: Install python dependencies
         run: |
.github/workflows/model_performance.yml (2 changes)
@@ -150,7 +150,7 @@ jobs:
       - name: Setup Python
         uses: actions/setup-python@v5
         with:
-          python-version: "3.8"
+          python-version: "3.9"

       - name: Install dbt
         run: pip install dbt-postgres==${{ needs.set-variables.outputs.release_id }}
.github/workflows/release.yml (21 changes)
@@ -247,3 +247,24 @@ jobs:

     secrets:
       SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}
+
+  testing-slack-notification:
+    # sends notifications to #slackbot-test
+    name: Testing - Slack Notification
+    if: ${{ failure() && inputs.test_run && !inputs.nightly_release }}
+
+    needs:
+      [
+        bump-version-generate-changelog,
+        build-test-package,
+        github-release,
+        pypi-release,
+        docker-release,
+      ]
+
+    uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main
+    with:
+      status: "failure"
+
+    secrets:
+      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_TESTING_WEBHOOK_URL }}
.github/workflows/schema-check.yml (4 changes)
@@ -30,14 +30,14 @@ env:

 jobs:
   checking-schemas:
-    name: "Checking schemas"
+    name: "Post-merge schema changes required"
     runs-on: ubuntu-latest

     steps:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: 3.8
+          python-version: 3.9

       - name: Checkout dbt repo
         uses: actions/checkout@v4
@@ -76,7 +76,7 @@ jobs:
       - name: Setup Python
         uses: actions/setup-python@v5
         with:
-          python-version: "3.8"
+          python-version: "3.9"

       - name: Install python dependencies
         run: |
.github/workflows/test-repeater.yml (5 changes)
@@ -27,7 +27,6 @@ on:
         description: 'Version of Python to Test Against'
         type: choice
         options:
-          - '3.8'
           - '3.9'
           - '3.10'
           - '3.11'
@@ -36,7 +35,7 @@ on:
         type: choice
         options:
           - 'ubuntu-latest'
-          - 'macos-12'
+          - 'macos-14'
           - 'windows-latest'
       num_runs_per_batch:
         description: 'Max number of times to run the test per batch. We always run 10 batches.'
@@ -101,7 +100,7 @@ jobs:

       # mac and windows don't use make due to limitations with docker with those runners in GitHub
       - name: "Set up postgres (macos)"
-        if: inputs.os == 'macos-12'
+        if: inputs.os == 'macos-14'
         uses: ./.github/actions/setup-postgres-macos

       - name: "Set up postgres (windows)"
.gitignore (6 changes)
@@ -57,6 +57,9 @@ test.env
 makefile.test.env
 *.pytest_cache/

+# Unit test artifacts
+index.html
+

 # Translations
 *.mo
@@ -105,3 +108,6 @@ venv/

 # poetry
 poetry.lock
+
+# asdf
+.tool-versions
@@ -1,4 +1,4 @@
 [settings]
 profile=black
 extend_skip_glob=.github/*,third-party-stubs/*,scripts/*
-known_first_party=dbt,dbt_adapters,dbt_common,dbt_extractor,dbt_semantic_interface
+known_first_party=dbt,dbt_adapters,dbt_common,dbt_extractor,dbt_semantic_interfaces
@@ -3,7 +3,7 @@

 exclude: ^(core/dbt/docs/build/|core/dbt/common/events/types_pb2.py|core/dbt/events/core_types_pb2.py|core/dbt/adapters/events/adapter_types_pb2.py)

-# Force all unspecified python hooks to run python 3.8
+# Force all unspecified python hooks to run python 3.9
 default_language_version:
   python: python3

@@ -15,16 +15,19 @@ repos:
       args: [--unsafe]
     - id: check-json
     - id: end-of-file-fixer
+      exclude: schemas/dbt/manifest/
     - id: trailing-whitespace
       exclude_types:
         - "markdown"
     - id: check-case-conflict
   - repo: https://github.com/pycqa/isort
-    rev: 5.12.0
+    # rev must match what's in dev-requirements.txt
+    rev: 5.13.2
     hooks:
       - id: isort
   - repo: https://github.com/psf/black
-    rev: 22.3.0
+    # rev must match what's in dev-requirements.txt
+    rev: 24.3.0
     hooks:
       - id: black
       - id: black
@@ -34,6 +37,7 @@ repos:
         - "--check"
         - "--diff"
   - repo: https://github.com/pycqa/flake8
+    # rev must match what's in dev-requirements.txt
     rev: 4.0.1
     hooks:
       - id: flake8
@@ -41,6 +45,7 @@ repos:
         alias: flake8-check
         stages: [manual]
   - repo: https://github.com/pre-commit/mirrors-mypy
+    # rev must match what's in dev-requirements.txt
     rev: v1.4.1
     hooks:
       - id: mypy
@@ -10,6 +10,7 @@
 For information on prior major and minor releases, see their changelogs:

+* [1.9](https://github.com/dbt-labs/dbt-core/blob/1.9.latest/CHANGELOG.md)
 * [1.8](https://github.com/dbt-labs/dbt-core/blob/1.8.latest/CHANGELOG.md)
 * [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
 * [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
@@ -170,9 +170,9 @@ Finally, you can also run a specific test or group of tests using [`pytest`](htt

 ```sh
 # run all unit tests in a file
-python3 -m pytest tests/unit/test_base_column.py
+python3 -m pytest tests/unit/test_invocation_id.py
 # run a specific unit test
-python3 -m pytest tests/unit/test_base_column.py::TestNumericType::test__numeric_type
+python3 -m pytest tests/unit/test_invocation_id.py::TestInvocationId::test_invocation_id
 # run specific Postgres functional tests
 python3 -m pytest tests/functional/sources
 ```
@@ -33,9 +33,6 @@ RUN apt-get update \
     python-is-python3 \
     python-dev-is-python3 \
     python3-pip \
-    python3.8 \
-    python3.8-dev \
-    python3.8-venv \
     python3.9 \
     python3.9-dev \
     python3.9-venv \
Makefile (4 changes)
@@ -144,3 +144,7 @@ help: ## Show this help message.
 	@echo
 	@echo 'options:'
 	@echo 'use USE_DOCKER=true to run target in a docker container'
+
+.PHONY: json_schema
+json_schema: ## Update generated JSON schema using code changes.
+	scripts/collect-artifact-schema.py --path schemas
codecov.yml (26 changes)
@@ -1,6 +1,7 @@
 ignore:
   - ".github"
   - ".changes"
+
 coverage:
   status:
     project:
@@ -11,3 +12,28 @@ coverage:
       default:
         target: auto
         threshold: 80%
+
+comment:
+  layout: "header, diff, flags, components" # show component info in the PR comment
+
+component_management:
+  default_rules: # default rules that will be inherited by all components
+    statuses:
+      - type: project # in this case every component that doens't have a status defined will have a project type one
+        target: auto
+        threshold: 0.1%
+      - type: patch
+        target: 80%
+  individual_components:
+    - component_id: unittests
+      name: "Unit Tests"
+      flag_regexes:
+        - "unit"
+      statuses:
+        - type: patch
+          target: 80%
+          threshold: 5%
+    - component_id: integrationtests
+      name: "Integration Tests"
+      flag_regexes:
+        - "integration"
@@ -3,6 +3,7 @@
 ## The following are individual files in this directory.

 ### compilation.py
+Testing

 ### constants.py
@@ -29,6 +29,10 @@ All existing resources are defined under `dbt/artifacts/resources/v1`.

 ## Making changes to dbt/artifacts

+### All changes
+
+All changes to any fields will require a manual update to [dbt-jsonschema](https://github.com/dbt-labs/dbt-jsonschema) to ensure live checking continues to work.
+
 ### Non-breaking changes

 Freely make incremental, non-breaking changes in-place to the latest major version of any artifact (minor or patch bumps). The only changes that are fully forward and backward compatible are:
@@ -42,9 +46,9 @@ These types of minor, non-breaking changes are tested by [tests/unit/artifacts/t

 #### Updating [schemas.getdbt.com](https://schemas.getdbt.com)
 Non-breaking changes to artifact schemas require an update to the corresponding jsonschemas published to [schemas.getdbt.com](https://schemas.getdbt.com), which are defined in https://github.com/dbt-labs/schemas.getdbt.com. To do so:
+Note this must be done AFTER the core pull request is merged, otherwise we may end up with unresolvable conflicts and schemas that are invalid prior to base pull request merge. You may create the schemas.getdbt.com pull request prior to merging the base pull request, but do not merge until afterward.
 1. Create a PR in https://github.com/dbt-labs/schemas.getdbt.com which reflects the schema changes to the artifact. The schema can be updated in-place for non-breaking changes. Example PR: https://github.com/dbt-labs/schemas.getdbt.com/pull/39
 2. Merge the https://github.com/dbt-labs/schemas.getdbt.com PR
-3. Observe the `Artifact Schema Check` CI check pass on the `dbt-core` PR that updates the artifact schemas, and merge the `dbt-core` PR!

 Note: Although `jsonschema` validation using the schemas in [schemas.getdbt.com](https://schemas.getdbt.com) is not encouraged or formally supported, `jsonschema` validation should still continue to work once the schemas are updated because they are forward-compatible and can therefore be used to validate previous minor versions of the schema.
@@ -38,6 +38,7 @@ from dbt.artifacts.resources.v1.macro import Macro, MacroArgument, MacroDependsO
 from dbt.artifacts.resources.v1.metric import (
     ConstantPropertyInput,
     ConversionTypeParams,
+    CumulativeTypeParams,
     Metric,
     MetricConfig,
     MetricInput,
@@ -45,7 +46,12 @@ from dbt.artifacts.resources.v1.metric import (
     MetricTimeWindow,
     MetricTypeParams,
 )
-from dbt.artifacts.resources.v1.model import Model, ModelConfig
+from dbt.artifacts.resources.v1.model import (
+    Model,
+    ModelConfig,
+    ModelFreshness,
+    TimeSpine,
+)
 from dbt.artifacts.resources.v1.owner import Owner
 from dbt.artifacts.resources.v1.saved_query import (
     Export,
@@ -68,3 +68,10 @@ class TimePeriod(StrEnum):

     def plural(self) -> str:
         return str(self) + "s"
+
+
+class BatchSize(StrEnum):
+    hour = "hour"
+    day = "day"
+    month = "month"
+    year = "year"
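Since `BatchSize` is a `StrEnum`, its members behave like the raw strings a user writes in config. A standalone sketch of that behavior using Python's built-in `enum` (the real class inherits dbt's `StrEnum`, assumed here to be string-backed):

```python
from enum import Enum

class BatchSize(str, Enum):
    """String-backed stand-in for the BatchSize enum added above."""
    hour = "hour"
    day = "day"
    month = "month"
    year = "year"

# str-backed members compare equal to plain strings, so a config value
# like batch_size: "day" can be validated and used directly.
assert BatchSize.day == "day"
assert BatchSize("month") is BatchSize.month
print([b.value for b in BatchSize])  # ['hour', 'day', 'month', 'year']
```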
@@ -10,6 +10,7 @@ from dbt_common.contracts.config.properties import AdditionalPropertiesMixin
 from dbt_common.contracts.constraints import ColumnLevelConstraint
 from dbt_common.contracts.util import Mergeable
 from dbt_common.dataclass_schema import ExtensibleDbtClassMixin, dbtClassMixin
+from dbt_semantic_interfaces.type_enums import TimeGranularity

 NodeVersion = Union[str, float]

@@ -66,6 +67,7 @@ class ColumnInfo(AdditionalPropertiesMixin, ExtensibleDbtClassMixin):
     quote: Optional[bool] = None
     tags: List[str] = field(default_factory=list)
     _extra: Dict[str, Any] = field(default_factory=dict)
+    granularity: Optional[TimeGranularity] = None


 @dataclass
@@ -192,6 +194,7 @@ class ParsedResource(ParsedResourceMandatory):
     unrendered_config: Dict[str, Any] = field(default_factory=dict)
     created_at: float = field(default_factory=lambda: time.time())
     config_call_dict: Dict[str, Any] = field(default_factory=dict)
+    unrendered_config_call_dict: Dict[str, Any] = field(default_factory=dict)
     relation_name: Optional[str] = None
     raw_code: str = ""

@@ -199,6 +202,8 @@ class ParsedResource(ParsedResourceMandatory):
         dct = super().__post_serialize__(dct, context)
         if context and context.get("artifact") and "config_call_dict" in dct:
             del dct["config_call_dict"]
+        if context and context.get("artifact") and "unrendered_config_call_dict" in dct:
+            del dct["unrendered_config_call_dict"]
         return dct
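The `__post_serialize__` change above means both config-call dictionaries are stripped whenever a node is serialized for an artifact, but kept for ordinary serialization. A self-contained sketch of that logic:

```python
from typing import Any, Dict, Optional

def post_serialize(dct: Dict[str, Any], context: Optional[Dict[str, Any]]) -> Dict[str, Any]:
    # Drop both config-call dicts only when serializing for an artifact.
    for key in ("config_call_dict", "unrendered_config_call_dict"):
        if context and context.get("artifact") and key in dct:
            del dct[key]
    return dct

node = {
    "name": "my_model",
    "config_call_dict": {"tags": ["nightly"]},
    "unrendered_config_call_dict": {"tags": "{{ var('tags') }}"},
}
print(post_serialize(dict(node), {"artifact": True}))  # both keys removed
print(post_serialize(dict(node), None))                # left intact
```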
@@ -80,6 +80,9 @@ class NodeConfig(NodeAndTestConfig):
     # 'mergebehavior' dictionary
     materialized: str = "view"
     incremental_strategy: Optional[str] = None
+    batch_size: Any = None
+    lookback: Any = 1
+    begin: Any = None
     persist_docs: Dict[str, Any] = field(default_factory=dict)
     post_hook: List[Hook] = field(
         default_factory=list,
@@ -122,6 +125,8 @@ class NodeConfig(NodeAndTestConfig):
         default_factory=ContractConfig,
         metadata=MergeBehavior.Update.meta(),
     )
+    event_time: Any = None
+    concurrent_batches: Any = None

     def __post_init__(self):
         # we validate that node_color has a suitable value to prevent dbt-docs from crashing
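Taken together, the fields added to `NodeConfig` above form the microbatch configuration surface (`event_time`, `batch_size`, `lookback`, `begin`, `concurrent_batches`). A minimal stand-in dataclass showing how those fields would be populated; the defaults follow the diff, and the comments reflect dbt's documented microbatch semantics rather than this file:

```python
from dataclasses import dataclass
from typing import Any, Optional

@dataclass
class MicrobatchConfigSketch:  # stand-in for illustration, not the real NodeConfig
    materialized: str = "view"
    incremental_strategy: Optional[str] = None
    batch_size: Any = None          # e.g. "day"; see the BatchSize enum earlier
    lookback: Any = 1               # number of past batches to reprocess
    begin: Any = None               # earliest event_time the model covers
    event_time: Any = None          # column that timestamps each row
    concurrent_batches: Any = None  # opt in/out of parallel batch execution

cfg = MicrobatchConfigSketch(
    materialized="incremental",
    incremental_strategy="microbatch",
    event_time="created_at",
    batch_size="day",
    begin="2024-01-01",
)
print(cfg)
```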
@@ -2,13 +2,6 @@ import time
 from dataclasses import dataclass, field
 from typing import Any, Dict, List, Literal, Optional

-from dbt_semantic_interfaces.references import MeasureReference, MetricReference
-from dbt_semantic_interfaces.type_enums import (
-    ConversionCalculationType,
-    MetricType,
-    TimeGranularity,
-)
-
 from dbt.artifacts.resources.base import GraphResource
 from dbt.artifacts.resources.types import NodeType
 from dbt.artifacts.resources.v1.components import DependsOn, RefArgs

@@ -18,6 +11,13 @@ from dbt.artifacts.resources.v1.semantic_layer_components import (
 )
 from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior
 from dbt_common.dataclass_schema import dbtClassMixin
+from dbt_semantic_interfaces.references import MeasureReference, MetricReference
+from dbt_semantic_interfaces.type_enums import (
+    ConversionCalculationType,
+    MetricType,
+    PeriodAggregation,
+    TimeGranularity,
+)

 """
 The following classes are dataclasses which are used to construct the Metric

@@ -46,7 +46,15 @@ class MetricInputMeasure(dbtClassMixin):
 @dataclass
 class MetricTimeWindow(dbtClassMixin):
     count: int
-    granularity: TimeGranularity
+    granularity: str
+
+    @property
+    def window_string(self) -> str:  # noqa: D
+        return f"{self.count} {self.granularity}"
+
+    @property
+    def is_standard_granularity(self) -> bool:  # noqa: D
+        return self.granularity.casefold() in {item.value.casefold() for item in TimeGranularity}


 @dataclass
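Note: with granularity now a plain string, standard and custom grains flow through one field, and is_standard_granularity distinguishes them. A hedged illustration (values are hypothetical):

    window = MetricTimeWindow(count=7, granularity="day")
    window.window_string             # "7 day"
    window.is_standard_granularity   # True: "day" is a TimeGranularity member
    MetricTimeWindow(count=2, granularity="fiscal_quarter").is_standard_granularity  # False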
@@ -55,7 +63,7 @@ class MetricInput(dbtClassMixin):
     filter: Optional[WhereFilterIntersection] = None
     alias: Optional[str] = None
     offset_window: Optional[MetricTimeWindow] = None
-    offset_to_grain: Optional[TimeGranularity] = None
+    offset_to_grain: Optional[str] = None

     def as_reference(self) -> MetricReference:
         return MetricReference(element_name=self.name)

@@ -80,6 +88,13 @@ class ConversionTypeParams(dbtClassMixin):
     constant_properties: Optional[List[ConstantPropertyInput]] = None


+@dataclass
+class CumulativeTypeParams(dbtClassMixin):
+    window: Optional[MetricTimeWindow] = None
+    grain_to_date: Optional[str] = None
+    period_agg: PeriodAggregation = PeriodAggregation.FIRST
+
+
 @dataclass
 class MetricTypeParams(dbtClassMixin):
     measure: Optional[MetricInputMeasure] = None

@@ -88,9 +103,12 @@ class MetricTypeParams(dbtClassMixin):
     denominator: Optional[MetricInput] = None
     expr: Optional[str] = None
     window: Optional[MetricTimeWindow] = None
-    grain_to_date: Optional[TimeGranularity] = None
+    grain_to_date: Optional[TimeGranularity] = (
+        None  # legacy, use cumulative_type_params.grain_to_date
+    )
     metrics: Optional[List[MetricInput]] = None
     conversion_type_params: Optional[ConversionTypeParams] = None
+    cumulative_type_params: Optional[CumulativeTypeParams] = None


 @dataclass
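Note: grain_to_date on MetricTypeParams is now marked legacy in favor of the new container. A hedged sketch of the replacement path (values are hypothetical):

    cumulative = CumulativeTypeParams(
        window=MetricTimeWindow(count=28, granularity="day"),
        period_agg=PeriodAggregation.FIRST,
    )
    params = MetricTypeParams(cumulative_type_params=cumulative)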
@@ -113,6 +131,7 @@ class Metric(GraphResource):
     type_params: MetricTypeParams
     filter: Optional[WhereFilterIntersection] = None
     metadata: Optional[SourceFileMetadata] = None
+    time_granularity: Optional[str] = None
     resource_type: Literal[NodeType.Metric]
     meta: Dict[str, Any] = field(default_factory=dict, metadata=MergeBehavior.Update.meta())
     tags: List[str] = field(default_factory=list)

@@ -1,16 +1,19 @@
+import enum
 from dataclasses import dataclass, field
 from datetime import datetime
 from typing import Dict, List, Literal, Optional

-from dbt.artifacts.resources.types import AccessType, NodeType
+from dbt.artifacts.resources.types import AccessType, NodeType, TimePeriod
 from dbt.artifacts.resources.v1.components import (
     CompiledResource,
     DeferRelation,
     NodeVersion,
+    Time,
 )
 from dbt.artifacts.resources.v1.config import NodeConfig
 from dbt_common.contracts.config.base import MergeBehavior
 from dbt_common.contracts.constraints import ModelLevelConstraint
+from dbt_common.dataclass_schema import dbtClassMixin


 @dataclass

@@ -21,6 +24,35 @@ class ModelConfig(NodeConfig):
     )


+@dataclass
+class CustomGranularity(dbtClassMixin):
+    name: str
+    column_name: Optional[str] = None
+
+
+@dataclass
+class TimeSpine(dbtClassMixin):
+    standard_granularity_column: str
+    custom_granularities: List[CustomGranularity] = field(default_factory=list)
+
+
+class ModelFreshnessDependsOnOptions(enum.Enum):
+    all = "all"
+    any = "any"
+
+
+@dataclass
+class ModelBuildAfter(Time):
+    depends_on: ModelFreshnessDependsOnOptions = ModelFreshnessDependsOnOptions.any
+    count: int = 0
+    period: TimePeriod = TimePeriod.hour
+
+
+@dataclass
+class ModelFreshness(dbtClassMixin):
+    build_after: ModelBuildAfter = field(default_factory=ModelBuildAfter)
+
+
 @dataclass
 class Model(CompiledResource):
     resource_type: Literal[NodeType.Model]

@@ -32,6 +64,8 @@ class Model(CompiledResource):
     deprecation_date: Optional[datetime] = None
     defer_relation: Optional[DeferRelation] = None
     primary_key: List[str] = field(default_factory=list)
+    time_spine: Optional[TimeSpine] = None
+    freshness: Optional[ModelFreshness] = None

     def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None):
         dct = super().__post_serialize__(dct, context)
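Note: a hedged sketch of the new time-spine metadata on a model (column and grain names are hypothetical):

    spine = TimeSpine(
        standard_granularity_column="date_day",
        custom_granularities=[CustomGranularity(name="fiscal_year", column_name="fy")],
    )
    # During parsing, a Model instance would carry this as model.time_spine (assumed flow).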
@@ -1,10 +1,10 @@
 from dataclasses import dataclass
-from typing import Optional
+from typing import List, Optional, Union

 from dbt_common.contracts.config.properties import AdditionalPropertiesAllowed


 @dataclass
 class Owner(AdditionalPropertiesAllowed):
-    email: Optional[str] = None
+    email: Union[str, List[str], None] = None
     name: Optional[str] = None

@@ -2,21 +2,22 @@ from __future__ import annotations

 import time
 from dataclasses import dataclass, field
-from typing import Any, Dict, List, Literal, Optional
+from typing import Any, Dict, List, Literal, Optional, Union

-from dbt_semantic_interfaces.type_enums.export_destination_type import (
-    ExportDestinationType,
-)
-
 from dbt.artifacts.resources.base import GraphResource
 from dbt.artifacts.resources.types import NodeType
 from dbt.artifacts.resources.v1.components import DependsOn, RefArgs
+from dbt.artifacts.resources.v1.config import list_str, metas
 from dbt.artifacts.resources.v1.semantic_layer_components import (
     SourceFileMetadata,
     WhereFilterIntersection,
 )
 from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior
+from dbt_common.contracts.config.metadata import ShowBehavior
 from dbt_common.dataclass_schema import dbtClassMixin
+from dbt_semantic_interfaces.type_enums.export_destination_type import (
+    ExportDestinationType,
+)


 @dataclass

@@ -35,6 +36,7 @@ class Export(dbtClassMixin):

     name: str
     config: ExportConfig
+    unrendered_config: Dict[str, str] = field(default_factory=dict)


 @dataclass

@@ -44,6 +46,8 @@ class QueryParams(dbtClassMixin):
     metrics: List[str]
     group_by: List[str]
     where: Optional[WhereFilterIntersection]
+    order_by: List[str] = field(default_factory=list)
+    limit: Optional[int] = None


 @dataclass

@@ -93,6 +97,10 @@ class SavedQuery(SavedQueryMandatory):
     depends_on: DependsOn = field(default_factory=DependsOn)
     created_at: float = field(default_factory=lambda: time.time())
     refs: List[RefArgs] = field(default_factory=list)
+    tags: Union[List[str], str] = field(
+        default_factory=list_str,
+        metadata=metas(ShowBehavior.Hide, MergeBehavior.Append, CompareBehavior.Exclude),
+    )

     @property
     def metrics(self) -> List[str]:
@@ -1,29 +1,32 @@
 from dataclasses import dataclass
 from typing import List, Sequence, Tuple

+from dbt_common.dataclass_schema import dbtClassMixin
 from dbt_semantic_interfaces.call_parameter_sets import FilterCallParameterSets
 from dbt_semantic_interfaces.parsing.where_filter.where_filter_parser import (
     WhereFilterParser,
 )

-from dbt_common.dataclass_schema import dbtClassMixin
-

 @dataclass
 class WhereFilter(dbtClassMixin):
     where_sql_template: str

-    @property
-    def call_parameter_sets(self) -> FilterCallParameterSets:
-        return WhereFilterParser.parse_call_parameter_sets(self.where_sql_template)
+    def call_parameter_sets(
+        self, custom_granularity_names: Sequence[str]
+    ) -> FilterCallParameterSets:
+        return WhereFilterParser.parse_call_parameter_sets(
+            self.where_sql_template, custom_granularity_names=custom_granularity_names
+        )


 @dataclass
 class WhereFilterIntersection(dbtClassMixin):
     where_filters: List[WhereFilter]

-    @property
-    def filter_expression_parameter_sets(self) -> Sequence[Tuple[str, FilterCallParameterSets]]:
+    def filter_expression_parameter_sets(
+        self, custom_granularity_names: Sequence[str]
+    ) -> Sequence[Tuple[str, FilterCallParameterSets]]:
         raise NotImplementedError
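Note: call_parameter_sets is now a method that takes the project's custom grain names instead of a property. A hedged usage sketch (the filter template and grain name are hypothetical):

    wf = WhereFilter(where_sql_template="{{ Dimension('customer__region') }} = 'EMEA'")
    param_sets = wf.call_parameter_sets(custom_granularity_names=["fiscal_quarter"])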
@@ -2,6 +2,11 @@ import time
 from dataclasses import dataclass, field
 from typing import Any, Dict, List, Optional, Sequence

+from dbt.artifacts.resources import SourceFileMetadata
+from dbt.artifacts.resources.base import GraphResource
+from dbt.artifacts.resources.v1.components import DependsOn, RefArgs
+from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior
+from dbt_common.dataclass_schema import dbtClassMixin
 from dbt_semantic_interfaces.references import (
     DimensionReference,
     EntityReference,

@@ -17,12 +22,6 @@ from dbt_semantic_interfaces.type_enums import (
     TimeGranularity,
 )

-from dbt.artifacts.resources import SourceFileMetadata
-from dbt.artifacts.resources.base import GraphResource
-from dbt.artifacts.resources.v1.components import DependsOn, RefArgs
-from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior
-from dbt_common.dataclass_schema import dbtClassMixin
-
 """
 The classes in this file are dataclasses which are used to construct the Semantic
 Model node in dbt-core. Additionally, these classes need to at a minimum support

@@ -32,6 +31,14 @@ https://github.com/dbt-labs/dbt-semantic-interfaces/blob/main/dbt_semantic_inter
 """


+@dataclass
+class SemanticLayerElementConfig(dbtClassMixin):
+    meta: Dict[str, Any] = field(
+        default_factory=dict,
+        metadata=MergeBehavior.Update.meta(),
+    )
+
+
 @dataclass
 class Defaults(dbtClassMixin):
     agg_time_dimension: Optional[str] = None

@@ -73,6 +80,7 @@ class Dimension(dbtClassMixin):
     type_params: Optional[DimensionTypeParams] = None
     expr: Optional[str] = None
     metadata: Optional[SourceFileMetadata] = None
+    config: Optional[SemanticLayerElementConfig] = None

     @property
     def reference(self) -> DimensionReference:

@@ -107,6 +115,7 @@ class Entity(dbtClassMixin):
     label: Optional[str] = None
     role: Optional[str] = None
     expr: Optional[str] = None
+    config: Optional[SemanticLayerElementConfig] = None

     @property
     def reference(self) -> EntityReference:

@@ -148,6 +157,7 @@ class Measure(dbtClassMixin):
     agg_params: Optional[MeasureAggregationParameters] = None
     non_additive_dimension: Optional[NonAdditiveDimension] = None
     agg_time_dimension: Optional[str] = None
+    config: Optional[SemanticLayerElementConfig] = None

     @property
     def reference(self) -> MeasureReference:
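Note: the new config field gives dimensions, entities, and measures a place for merge-updated meta, mirroring node-level meta. A hedged sketch:

    element_config = SemanticLayerElementConfig(meta={"owner": "analytics"})
    # dimension.config = element_config  # likewise for Entity and Measure (assumed usage)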
@@ -1,56 +1,74 @@
-from dataclasses import dataclass
+from dataclasses import dataclass, field
 from typing import Dict, List, Literal, Optional, Union

 from dbt.artifacts.resources.types import NodeType
 from dbt.artifacts.resources.v1.components import CompiledResource, DeferRelation
 from dbt.artifacts.resources.v1.config import NodeConfig
-from dbt_common.dataclass_schema import ValidationError
+from dbt_common.dataclass_schema import ValidationError, dbtClassMixin
+
+
+@dataclass
+class SnapshotMetaColumnNames(dbtClassMixin):
+    dbt_valid_to: Optional[str] = None
+    dbt_valid_from: Optional[str] = None
+    dbt_scd_id: Optional[str] = None
+    dbt_updated_at: Optional[str] = None
+    dbt_is_deleted: Optional[str] = None


 @dataclass
 class SnapshotConfig(NodeConfig):
     materialized: str = "snapshot"
     strategy: Optional[str] = None
-    unique_key: Optional[str] = None
+    unique_key: Union[str, List[str], None] = None
     target_schema: Optional[str] = None
     target_database: Optional[str] = None
     updated_at: Optional[str] = None
     # Not using Optional because of serialization issues with a Union of str and List[str]
     check_cols: Union[str, List[str], None] = None
+    snapshot_meta_column_names: SnapshotMetaColumnNames = field(
+        default_factory=SnapshotMetaColumnNames
+    )
+    dbt_valid_to_current: Optional[str] = None

-    @classmethod
-    def validate(cls, data):
-        super().validate(data)
-        # Note: currently you can't just set these keys in schema.yml because this validation
-        # will fail when parsing the snapshot node.
-        if not data.get("strategy") or not data.get("unique_key") or not data.get("target_schema"):
-            raise ValidationError(
-                "Snapshots must be configured with a 'strategy', 'unique_key', "
-                "and 'target_schema'."
-            )
-        if data.get("strategy") == "check":
-            if not data.get("check_cols"):
+    @property
+    def snapshot_table_column_names(self):
+        return {
+            "dbt_valid_from": self.snapshot_meta_column_names.dbt_valid_from or "dbt_valid_from",
+            "dbt_valid_to": self.snapshot_meta_column_names.dbt_valid_to or "dbt_valid_to",
+            "dbt_scd_id": self.snapshot_meta_column_names.dbt_scd_id or "dbt_scd_id",
+            "dbt_updated_at": self.snapshot_meta_column_names.dbt_updated_at or "dbt_updated_at",
+            "dbt_is_deleted": self.snapshot_meta_column_names.dbt_is_deleted or "dbt_is_deleted",
+        }
+
+    def final_validate(self):
+        if not self.strategy or not self.unique_key:
+            raise ValidationError(
+                "Snapshots must be configured with a 'strategy' and 'unique_key'."
+            )
+        if self.strategy == "check":
+            if not self.check_cols:
                 raise ValidationError(
                     "A snapshot configured with the check strategy must "
                     "specify a check_cols configuration."
                 )
-            if isinstance(data["check_cols"], str) and data["check_cols"] != "all":
+            if isinstance(self.check_cols, str) and self.check_cols != "all":
                 raise ValidationError(
-                    f"Invalid value for 'check_cols': {data['check_cols']}. "
+                    f"Invalid value for 'check_cols': {self.check_cols}. "
                     "Expected 'all' or a list of strings."
                 )
-        elif data.get("strategy") == "timestamp":
-            if not data.get("updated_at"):
+        elif self.strategy == "timestamp":
+            if not self.updated_at:
                 raise ValidationError(
                     "A snapshot configured with the timestamp strategy "
                     "must specify an updated_at configuration."
                 )
-            if data.get("check_cols"):
+            if self.check_cols:
                 raise ValidationError("A 'timestamp' snapshot should not have 'check_cols'")
         # If the strategy is not 'check' or 'timestamp' it's a custom strategy,
         # formerly supported with GenericSnapshotConfig
-        if data.get("materialized") and data.get("materialized") != "snapshot":
+
+        if self.materialized and self.materialized != "snapshot":
             raise ValidationError("A snapshot must have a materialized value of 'snapshot'")
+
     # Called by "calculate_node_config_dict" in ContextConfigGenerator
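Note: validation moves from the classmethod validate(data) to an instance-level final_validate, and target_schema is no longer required. A hedged sketch of the new column-name overrides (values are hypothetical):

    cfg = SnapshotConfig(
        strategy="timestamp",
        unique_key="id",
        updated_at="updated_at",
        snapshot_meta_column_names=SnapshotMetaColumnNames(dbt_valid_to="valid_to"),
    )
    cfg.final_validate()  # passes: strategy and unique_key are present
    cfg.snapshot_table_column_names["dbt_valid_to"]  # "valid_to" (overridden)
    cfg.snapshot_table_column_names["dbt_scd_id"]    # "dbt_scd_id" (default)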
@@ -19,6 +19,7 @@ from dbt_common.exceptions import CompilationError
 @dataclass
 class SourceConfig(BaseConfig):
     enabled: bool = True
+    event_time: Any = None


 @dataclass

@@ -58,6 +59,7 @@ class ParsedSourceMandatory(GraphResource, HasRelationMetadata):
 class SourceDefinition(ParsedSourceMandatory):
     quoting: Quoting = field(default_factory=Quoting)
     loaded_at_field: Optional[str] = None
+    loaded_at_query: Optional[str] = None
     freshness: Optional[FreshnessThreshold] = None
     external: Optional[ExternalTable] = None
     description: str = ""

@@ -70,3 +72,5 @@ class SourceDefinition(ParsedSourceMandatory):
     unrendered_config: Dict[str, Any] = field(default_factory=dict)
     relation_name: Optional[str] = None
     created_at: float = field(default_factory=lambda: time.time())
+    unrendered_database: Optional[str] = None
+    unrendered_schema: Optional[str] = None

@@ -20,6 +20,7 @@ class UnitTestConfig(BaseConfig):
         default_factory=dict,
         metadata=MergeBehavior.Update.meta(),
     )
+    enabled: bool = True


 class UnitTestFormat(StrEnum):

@@ -77,8 +77,11 @@ class BaseArtifactMetadata(dbtClassMixin):
 # remote-compile-result
 # remote-execution-result
 # remote-run-result
+S = TypeVar("S", bound="VersionedSchema")


 def schema_version(name: str, version: int):
-    def inner(cls: Type[VersionedSchema]):
+    def inner(cls: Type[S]):
         cls.dbt_schema_version = SchemaVersion(
             name=name,
             version=version,
core/dbt/artifacts/schemas/batch_results.py (new file, 24 lines)
@@ -0,0 +1,24 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from datetime import datetime
+from typing import List, Tuple
+
+from dbt_common.dataclass_schema import dbtClassMixin
+
+BatchType = Tuple[datetime, datetime]
+
+
+@dataclass
+class BatchResults(dbtClassMixin):
+    successful: List[BatchType] = field(default_factory=list)
+    failed: List[BatchType] = field(default_factory=list)
+
+    def __add__(self, other: BatchResults) -> BatchResults:
+        return BatchResults(
+            successful=self.successful + other.successful,
+            failed=self.failed + other.failed,
+        )
+
+    def __len__(self):
+        return len(self.successful) + len(self.failed)
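Note: BatchResults composes across retries via __add__, so partial reruns can be merged into one result. A hedged illustration (timestamps are arbitrary):

    from datetime import datetime

    first = BatchResults(successful=[(datetime(2024, 1, 1), datetime(2024, 1, 2))])
    retry = BatchResults(failed=[(datetime(2024, 1, 2), datetime(2024, 1, 3))])
    combined = first + retry
    len(combined)  # 2: one successful batch plus one failed batch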
@@ -1,2 +1,11 @@
 # alias to latest
 from dbt.artifacts.schemas.catalog.v1.catalog import *  # noqa
+from dbt_common.contracts.metadata import (
+    CatalogKey,
+    CatalogTable,
+    ColumnMap,
+    ColumnMetadata,
+    StatsDict,
+    StatsItem,
+    TableMetadata,
+)

@@ -1,71 +1,18 @@
 from dataclasses import dataclass, field
 from datetime import datetime
-from typing import Any, Dict, List, NamedTuple, Optional, Union
+from typing import Any, Dict, List, Optional, Union

 from dbt.artifacts.schemas.base import (
     ArtifactMixin,
     BaseArtifactMetadata,
     schema_version,
 )
+from dbt_common.contracts.metadata import CatalogTable
 from dbt_common.dataclass_schema import dbtClassMixin
-from dbt_common.utils.formatting import lowercase

 Primitive = Union[bool, str, float, None]
 PrimitiveDict = Dict[str, Primitive]

-CatalogKey = NamedTuple(
-    "CatalogKey", [("database", Optional[str]), ("schema", str), ("name", str)]
-)
-
-
-@dataclass
-class StatsItem(dbtClassMixin):
-    id: str
-    label: str
-    value: Primitive
-    include: bool
-    description: Optional[str] = None
-
-
-StatsDict = Dict[str, StatsItem]
-
-
-@dataclass
-class ColumnMetadata(dbtClassMixin):
-    type: str
-    index: int
-    name: str
-    comment: Optional[str] = None
-
-
-ColumnMap = Dict[str, ColumnMetadata]
-
-
-@dataclass
-class TableMetadata(dbtClassMixin):
-    type: str
-    schema: str
-    name: str
-    database: Optional[str] = None
-    comment: Optional[str] = None
-    owner: Optional[str] = None
-
-
-@dataclass
-class CatalogTable(dbtClassMixin):
-    metadata: TableMetadata
-    columns: ColumnMap
-    stats: StatsDict
-    # the same table with two unique IDs will just be listed two times
-    unique_id: Optional[str] = None
-
-    def key(self) -> CatalogKey:
-        return CatalogKey(
-            lowercase(self.metadata.database),
-            self.metadata.schema.lower(),
-            self.metadata.name.lower(),
-        )


 @dataclass
 class CatalogMetadata(BaseArtifactMetadata):
@@ -28,6 +28,7 @@ from dbt.artifacts.schemas.base import (
     schema_version,
 )
 from dbt.artifacts.schemas.upgrades import upgrade_manifest_json
+from dbt_common.exceptions import DbtInternalError

 NodeEdgeMap = Dict[str, List[str]]
 UniqueID = str
@@ -180,3 +181,13 @@ class WritableManifest(ArtifactMixin):
         if manifest_schema_version < cls.dbt_schema_version.version:
             data = upgrade_manifest_json(data, manifest_schema_version)
         return cls.from_dict(data)
+
+    @classmethod
+    def validate(cls, _):
+        # When dbt tries to load an artifact with additional optional fields
+        # that are not present in the schema, from_dict will work fine.
+        # As long as validate is not called, the schema will not be enforced.
+        # This is intentional, as it allows for safer schema upgrades.
+        raise DbtInternalError(
+            "The WritableManifest should never be validated directly to allow for schema upgrades."
+        )
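Note: the override keeps from_dict tolerant of newer optional fields while making direct schema validation an explicit internal error. A hedged sketch of the resulting behavior:

    manifest = WritableManifest.from_dict(data)  # lenient: extra optional fields are fine
    WritableManifest.validate(data)              # always raises DbtInternalError, by design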
@@ -10,6 +10,12 @@ from dbt_common.utils import cast_to_int, cast_to_str

 @dataclass
 class TimingInfo(dbtClassMixin):
+    """
+    Represents a step in the execution of a node.
+    `name` should be one of: compile, execute, or other
+    Do not call directly, use `collect_timing_info` instead.
+    """
+
     name: str
     started_at: Optional[datetime] = None
     completed_at: Optional[datetime] = None

@@ -21,7 +27,7 @@ class TimingInfo(dbtClassMixin):
         self.completed_at = datetime.utcnow()

     def to_msg_dict(self):
-        msg_dict = {"name": self.name}
+        msg_dict = {"name": str(self.name)}
         if self.started_at:
             msg_dict["started_at"] = datetime_to_json_string(self.started_at)
         if self.completed_at:
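Note: wrapping name in str() keeps the message dict JSON-safe even if a non-plain-string (e.g. an enum-backed value) is ever passed. A hedged illustration:

    info = TimingInfo(name="compile")
    info.to_msg_dict()  # {"name": "compile"}, plus timestamps once started_at/completed_at are set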
@@ -55,14 +61,18 @@ class NodeStatus(StrEnum):
     Fail = "fail"
     Warn = "warn"
     Skipped = "skipped"
+    PartialSuccess = "partial success"
     Pass = "pass"
     RuntimeErr = "runtime error"
+    NoOp = "no-op"


 class RunStatus(StrEnum):
     Success = NodeStatus.Success
     Error = NodeStatus.Error
     Skipped = NodeStatus.Skipped
+    PartialSuccess = NodeStatus.PartialSuccess
+    NoOp = NodeStatus.NoOp


 class TestStatus(StrEnum):
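Note: the added statuses thread the microbatch partial-success and no-op paths through both enums. A small hedged check:

    RunStatus.PartialSuccess == NodeStatus.PartialSuccess  # True: same underlying string
    NodeStatus.NoOp.value  # "no-op"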
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import copy
 import threading
 from dataclasses import dataclass, field

@@ -17,6 +19,7 @@ from dbt.artifacts.schemas.base import (
     get_artifact_schema_version,
     schema_version,
 )
+from dbt.artifacts.schemas.batch_results import BatchResults
 from dbt.artifacts.schemas.results import (
     BaseResult,
     ExecutionResult,

@@ -34,6 +37,7 @@ class RunResult(NodeResult):
     agate_table: Optional["agate.Table"] = field(
         default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
     )
+    batch_results: Optional[BatchResults] = None

     @property
     def skipped(self):

@@ -51,6 +55,7 @@ class RunResult(NodeResult):
             node=node,
             adapter_response={},
             failures=None,
+            batch_results=None,
         )

@@ -67,6 +72,7 @@ class RunResultOutput(BaseResult):
     compiled: Optional[bool]
     compiled_code: Optional[str]
     relation_name: Optional[str]
+    batch_results: Optional[BatchResults] = None


 def process_run_result(result: RunResult) -> RunResultOutput:

@@ -82,6 +88,7 @@ def process_run_result(result: RunResult) -> RunResultOutput:
         message=result.message,
         adapter_response=result.adapter_response,
         failures=result.failures,
+        batch_results=result.batch_results,
         compiled=result.node.compiled if compiled else None,  # type:ignore
         compiled_code=result.node.compiled_code if compiled else None,  # type:ignore
         relation_name=result.node.relation_name if compiled else None,  # type:ignore

@@ -158,7 +165,8 @@ class RunResultsArtifact(ExecutionResult, ArtifactMixin):
     @classmethod
     def upgrade_schema_version(cls, data):
         """This overrides the "upgrade_schema_version" call in VersionedSchema (via
-        ArtifactMixin) to modify the dictionary passed in from earlier versions of the run_results."""
+        ArtifactMixin) to modify the dictionary passed in from earlier versions of the run_results.
+        """
         run_results_schema_version = get_artifact_schema_version(data)
         # If less than the current version (v5), preprocess contents to match latest schema version
         if run_results_schema_version <= 5:
@@ -1,7 +1,10 @@
-from typing import IO, Optional
+from typing import IO, List, Optional, Union

 from click.exceptions import ClickException

+from dbt.artifacts.schemas.catalog import CatalogArtifact
+from dbt.contracts.graph.manifest import Manifest
+from dbt.contracts.results import RunExecutionResult
 from dbt.utils import ExitCodes

@@ -23,7 +26,7 @@ class CliException(ClickException):

     # the typing of _file is to satisfy the signature of ClickException.show
     # overriding this method prevents click from printing any exceptions to stdout
-    def show(self, _file: Optional[IO] = None) -> None:
+    def show(self, _file: Optional[IO] = None) -> None:  # type: ignore[type-arg]
         pass

@@ -31,7 +34,17 @@ class ResultExit(CliException):
     """This class wraps any exception that contains results while invoking dbt, or the
     results of an invocation that did not succeed but did not throw any exceptions."""

-    def __init__(self, result) -> None:
+    def __init__(
+        self,
+        result: Union[
+            bool,  # debug
+            CatalogArtifact,  # docs generate
+            List[str],  # list/ls
+            Manifest,  # parse
+            None,  # clean, deps, init, source
+            RunExecutionResult,  # build, compile, run, seed, snapshot, test, run-operation
+        ] = None,
+    ) -> None:
         super().__init__(ExitCodes.ModelError)
         self.result = result
@@ -1,6 +1,7 @@
 import os
 import sys
 from dataclasses import dataclass
+from datetime import datetime
 from importlib import import_module
 from pathlib import Path
 from pprint import pformat as pf

@@ -15,7 +16,7 @@ from dbt.cli.resolvers import default_log_path, default_project_dir
 from dbt.cli.types import Command as CliCommand
 from dbt.config.project import read_project_flags
 from dbt.contracts.project import ProjectFlags
-from dbt.deprecations import renamed_env_var
+from dbt.deprecations import fire_buffered_deprecations, renamed_env_var
 from dbt.events import ALL_EVENT_NAMES
 from dbt_common import ui
 from dbt_common.clients import jinja

@@ -37,6 +38,7 @@ FLAGS_DEFAULTS = {
     "STRICT_MODE": False,
     "STORE_FAILURES": False,
     "INTROSPECT": True,
+    "STATE_MODIFIED_COMPARE_VARS": False,
 }

 DEPRECATED_PARAMS = {

@@ -57,6 +59,7 @@ def convert_config(config_name, config_value):
         ret = WarnErrorOptions(
             include=config_value.get("include", []),
             exclude=config_value.get("exclude", []),
+            silence=config_value.get("silence", []),
             valid_error_names=ALL_EVENT_NAMES,
         )
         return ret

@@ -91,6 +94,8 @@ class Flags:
         # Set the default flags.
         for key, value in FLAGS_DEFAULTS.items():
             object.__setattr__(self, key, value)
+        # Use to handle duplicate params in _assign_params
+        flags_defaults_list = list(FLAGS_DEFAULTS.keys())

         if ctx is None:
             ctx = get_current_context()

@@ -172,13 +177,29 @@ class Flags:
                     old_name=dep_param.envvar,
                     new_name=new_param.envvar,
                 )
+            # end deprecated_params
+
             # Set the flag value.
-            is_duplicate = hasattr(self, param_name.upper())
+            is_duplicate = (
+                hasattr(self, param_name.upper())
+                and param_name.upper() not in flags_defaults_list
+            )
+            # First time through, set as though FLAGS_DEFAULTS hasn't been set, so not a duplicate.
+            # Subsequent pass (to process "parent" params) should be treated as duplicates.
+            if param_name.upper() in flags_defaults_list:
+                flags_defaults_list.remove(param_name.upper())
+            # Note: the following determines whether parameter came from click default,
+            # not from FLAGS_DEFAULTS in __init__.
             is_default = ctx.get_parameter_source(param_name) == ParameterSource.DEFAULT
+            is_envvar = ctx.get_parameter_source(param_name) == ParameterSource.ENVIRONMENT
+
             flag_name = (new_name or param_name).upper()

-            if (is_duplicate and not is_default) or not is_duplicate:
+            # envvar flags are assigned in either parent or child context if there
+            # isn't an overriding cli command flag.
+            # If the flag has been encountered as a child cli flag, we don't
+            # want to overwrite with parent envvar, since the commandline flag takes precedence.
+            if (is_duplicate and not (is_default or is_envvar)) or not is_duplicate:
                 object.__setattr__(self, flag_name, param_value)

             # Track default assigned params.

@@ -289,6 +310,13 @@ class Flags:
             params_assigned_from_default, ["WARN_ERROR", "WARN_ERROR_OPTIONS"]
         )

+        # Handle arguments mutually exclusive with INLINE
+        self._assert_mutually_exclusive(params_assigned_from_default, ["SELECT", "INLINE"])
+        self._assert_mutually_exclusive(params_assigned_from_default, ["SELECTOR", "INLINE"])
+
+        # Check event_time configs for validity
+        self._validate_event_time_configs()
+
         # Support lower cased access for legacy code.
         params = set(
             x for x in dir(self) if not callable(getattr(self, x)) and not x.startswith("__")

@@ -315,7 +343,9 @@ class Flags:
         """
         set_flag = None
         for flag in group:
-            flag_set_by_user = flag.lower() not in params_assigned_from_default
+            flag_set_by_user = (
+                hasattr(self, flag) and flag.lower() not in params_assigned_from_default
+            )
             if flag_set_by_user and set_flag:
                 raise DbtUsageException(
                     f"{flag.lower()}: not allowed with argument {set_flag.lower()}"
@@ -323,6 +353,36 @@ class Flags:
             elif flag_set_by_user:
                 set_flag = flag

+    def _validate_event_time_configs(self) -> None:
+        event_time_start: datetime = (
+            getattr(self, "EVENT_TIME_START") if hasattr(self, "EVENT_TIME_START") else None
+        )
+        event_time_end: datetime = (
+            getattr(self, "EVENT_TIME_END") if hasattr(self, "EVENT_TIME_END") else None
+        )
+
+        # only do validations if at least one of `event_time_start` or `event_time_end` is specified
+        if event_time_start is not None or event_time_end is not None:
+
+            # These `ifs`, combined with the parent `if`, make it so that `event_time_start` and
+            # `event_time_end` are mutually required
+            if event_time_start is None:
+                raise DbtUsageException(
+                    "The flag `--event-time-end` was specified, but `--event-time-start` was not. "
+                    "When specifying `--event-time-end`, `--event-time-start` must also be present."
+                )
+            if event_time_end is None:
+                raise DbtUsageException(
+                    "The flag `--event-time-start` was specified, but `--event-time-end` was not. "
+                    "When specifying `--event-time-start`, `--event-time-end` must also be present."
+                )
+
+            # This `if` is just a sanity check that `event_time_start` is before `event_time_end`
+            if event_time_start >= event_time_end:
+                raise DbtUsageException(
+                    "Value for `--event-time-start` must be less than `--event-time-end`"
+                )
+
     def fire_deprecations(self):
         """Fires events for deprecated env_var usage."""
         [dep_fn() for dep_fn in self.deprecated_env_var_warnings]
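Note: the two event-time flags are mutually required and must be ordered. An illustration of the resulting CLI contract (command lines are examples, not part of the diff):

    dbt run --event-time-start "2024-01-01" --event-time-end "2024-01-02"   # OK
    dbt run --event-time-start "2024-01-01"                                 # DbtUsageException
    dbt run --event-time-start "2024-01-02" --event-time-end "2024-01-01"   # DbtUsageException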
@@ -330,6 +390,8 @@ class Flags:
         # not get pickled when written to disk as json.
         object.__delattr__(self, "deprecated_env_var_warnings")

+        fire_buffered_deprecations()
+
     @classmethod
     def from_dict(cls, command: CliCommand, args_dict: Dict[str, Any]) -> "Flags":
         command_arg_list = command_params(command, args_dict)

@@ -8,12 +8,15 @@ from click.exceptions import BadOptionUsage
 from click.exceptions import Exit as ClickExit
 from click.exceptions import NoSuchOption, UsageError

+from dbt.adapters.factory import register_adapter
 from dbt.artifacts.schemas.catalog import CatalogArtifact
 from dbt.artifacts.schemas.run import RunExecutionResult
 from dbt.cli import params as p
 from dbt.cli import requires
 from dbt.cli.exceptions import DbtInternalException, DbtUsageException
+from dbt.cli.requires import setup_manifest
 from dbt.contracts.graph.manifest import Manifest
+from dbt.mp_context import get_mp_context
 from dbt_common.events.base_types import EventMsg

@@ -137,6 +140,7 @@ def global_flags(func):
     @p.warn_error
     @p.warn_error_options
     @p.write_json
+    @p.use_fast_test_edges
     @functools.wraps(func)
     def wrapper(*args, **kwargs):
         return func(*args, **kwargs)

@@ -165,6 +169,8 @@ def cli(ctx, **kwargs):
 @click.pass_context
 @global_flags
 @p.empty
+@p.event_time_start
+@p.event_time_end
 @p.exclude
 @p.export_saved_queries
 @p.full_refresh

@@ -218,8 +224,7 @@ def clean(ctx, **kwargs):
     """Delete all folders in the clean-targets list (usually the dbt_packages and target directories.)"""
     from dbt.task.clean import CleanTask

-    task = CleanTask(ctx.obj["flags"], ctx.obj["project"])
-
-    results = task.run()
-    success = task.interpret_results(results)
+    with CleanTask(ctx.obj["flags"], ctx.obj["project"]) as task:
+        results = task.run()
+        success = task.interpret_results(results)
     return results, success

@@ -274,6 +279,7 @@ def docs_generate(ctx, **kwargs):
 @click.pass_context
 @global_flags
 @p.browser
+@p.host
 @p.port
 @p.profiles_dir
 @p.project_dir

@@ -352,6 +358,7 @@ def compile(ctx, **kwargs):
 @p.select
 @p.selector
 @p.inline
+@p.inline_direct
 @p.target_path
 @p.threads
 @p.vars

@@ -360,12 +367,21 @@ def compile(ctx, **kwargs):
 @requires.profile
 @requires.project
 @requires.runtime_config
-@requires.manifest
 def show(ctx, **kwargs):
     """Generates executable SQL for a named resource or inline query, runs that SQL, and returns a preview of the
     results. Does not materialize anything to the warehouse."""
-    from dbt.task.show import ShowTask
+    from dbt.task.show import ShowTask, ShowTaskDirect
+
+    if ctx.obj["flags"].inline_direct:
+        # Issue the inline query directly, with no templating. Does not require
+        # loading the manifest.
+        register_adapter(ctx.obj["runtime_config"], get_mp_context())
+        task = ShowTaskDirect(
+            ctx.obj["flags"],
+            ctx.obj["runtime_config"],
+        )
+    else:
+        setup_manifest(ctx)
         task = ShowTask(
             ctx.obj["flags"],
             ctx.obj["runtime_config"],
@@ -436,7 +452,7 @@ def deps(ctx, **kwargs):
|
|||||||
message=f"Version is required in --add-package when a package when source is {flags.SOURCE}",
|
message=f"Version is required in --add-package when a package when source is {flags.SOURCE}",
|
||||||
option_name="--add-package",
|
option_name="--add-package",
|
||||||
)
|
)
|
||||||
task = DepsTask(flags, ctx.obj["project"])
|
with DepsTask(flags, ctx.obj["project"]) as task:
|
||||||
results = task.run()
|
results = task.run()
|
||||||
success = task.interpret_results(results)
|
success = task.interpret_results(results)
|
||||||
return results, success
|
return results, success
|
||||||
@@ -458,8 +474,7 @@ def init(ctx, **kwargs):
|
|||||||
"""Initialize a new dbt project."""
|
"""Initialize a new dbt project."""
|
||||||
from dbt.task.init import InitTask
|
from dbt.task.init import InitTask
|
||||||
|
|
||||||
task = InitTask(ctx.obj["flags"])
|
with InitTask(ctx.obj["flags"]) as task:
|
||||||
|
|
||||||
results = task.run()
|
results = task.run()
|
||||||
success = task.interpret_results(results)
|
success = task.interpret_results(results)
|
||||||
return results, success
|
return results, success
|
||||||
@@ -538,6 +553,8 @@ def parse(ctx, **kwargs):
|
|||||||
@p.profiles_dir
|
@p.profiles_dir
|
||||||
@p.project_dir
|
@p.project_dir
|
||||||
@p.empty
|
@p.empty
|
||||||
|
@p.event_time_start
|
||||||
|
@p.event_time_end
|
||||||
@p.select
|
@p.select
|
||||||
@p.selector
|
@p.selector
|
||||||
@p.target_path
|
@p.target_path
|
||||||
@@ -700,6 +717,7 @@ def seed(ctx, **kwargs):
|
|||||||
@cli.command("snapshot")
|
@cli.command("snapshot")
|
||||||
@click.pass_context
|
@click.pass_context
|
||||||
@global_flags
|
@global_flags
|
||||||
|
@p.empty
|
||||||
@p.exclude
|
@p.exclude
|
||||||
@p.profiles_dir
|
@p.profiles_dir
|
||||||
@p.project_dir
|
@p.project_dir
|
||||||
@@ -782,6 +800,8 @@ cli.commands["source"].add_command(snapshot_freshness, "snapshot-freshness") #
|
|||||||
@click.pass_context
|
@click.pass_context
|
||||||
@global_flags
|
@global_flags
|
||||||
@p.exclude
|
@p.exclude
|
||||||
|
@p.resource_type
|
||||||
|
@p.exclude_resource_type
|
||||||
@p.profiles_dir
|
@p.profiles_dir
|
||||||
@p.project_dir
|
@p.project_dir
|
||||||
@p.select
|
@p.select
|
||||||
@@ -1,6 +1,6 @@
 from click import Choice, ParamType

-from dbt.config.utils import exclusive_primary_alt_value_setting, parse_cli_yaml_string
+from dbt.config.utils import normalize_warn_error_options, parse_cli_yaml_string
 from dbt.events import ALL_EVENT_NAMES
 from dbt.exceptions import OptionNotYamlDictError, ValidationError
 from dbt_common.exceptions import DbtValidationError

@@ -51,12 +51,7 @@ class WarnErrorOptionsType(YAML):
     def convert(self, value, param, ctx):
         # this function is being used by param in click
         include_exclude = super().convert(value, param, ctx)
-        exclusive_primary_alt_value_setting(
-            include_exclude, "include", "error", "warn_error_options"
-        )
-        exclusive_primary_alt_value_setting(
-            include_exclude, "exclude", "warn", "warn_error_options"
-        )
+        normalize_warn_error_options(include_exclude)

         return WarnErrorOptions(
             include=include_exclude.get("include", []),
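The two `exclusive_primary_alt_value_setting` calls being removed collapsed the `error`/`warn` alias keys into `include`/`exclude`; the new `normalize_warn_error_options` helper evidently bundles that up. A minimal sketch of what such a helper might do, inferred from the calls it replaces (the real implementation lives in `dbt.config.utils` and is not shown in this diff):

```python
# Illustration only: a plausible shape for normalize_warn_error_options, based on
# the exclusive_primary_alt_value_setting() calls it replaces above. Not dbt's code.
from typing import Any, Dict, Optional


def normalize_warn_error_options(options: Optional[Dict[str, Any]]) -> None:
    """Fold the 'error'/'warn' alias keys into 'include'/'exclude' in place."""
    if not options:
        return
    for primary, alias in (("include", "error"), ("exclude", "warn")):
        if alias in options:
            if options.get(primary) is not None:
                # Setting both spellings at once is ambiguous; the real helper raises.
                raise ValueError(f"got both '{primary}' and '{alias}' in warn_error_options")
            options[primary] = options.pop(alias)


opts = {"error": ["NoNodesForSelectionCriteria"], "warn": []}
normalize_warn_error_options(opts)
print(opts)  # {'include': ['NoNodesForSelectionCriteria'], 'exclude': []}
```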
@@ -91,6 +91,22 @@ empty = click.option(
     is_flag=True,
 )

+event_time_end = click.option(
+    "--event-time-end",
+    envvar="DBT_EVENT_TIME_END",
+    help="If specified, the end datetime dbt uses to filter microbatch model inputs (exclusive).",
+    type=click.DateTime(),
+    default=None,
+)
+
+event_time_start = click.option(
+    "--event-time-start",
+    envvar="DBT_EVENT_TIME_START",
+    help="If specified, the start datetime dbt uses to filter microbatch model inputs (inclusive).",
+    type=click.DateTime(),
+    default=None,
+)
+
 exclude = click.option(
     "--exclude",
     envvar=None,
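Both new options use `click.DateTime()` with no explicit format list, so they accept click's default datetime formats. A standalone check of what will parse, independent of dbt:

```python
# Standalone illustration of how click.DateTime() parses values like those accepted
# by --event-time-start / --event-time-end above. Not dbt code.
import click

dt = click.DateTime()  # default formats: %Y-%m-%d, %Y-%m-%dT%H:%M:%S, %Y-%m-%d %H:%M:%S
print(dt.convert("2024-06-01", None, None))           # datetime(2024, 6, 1, 0, 0)
print(dt.convert("2024-06-01T12:30:00", None, None))  # datetime(2024, 6, 1, 12, 30)
```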
@@ -135,6 +151,14 @@ full_refresh = click.option(
     is_flag=True,
 )

+host = click.option(
+    "--host",
+    envvar="DBT_HOST",
+    help="host to serve dbt docs on",
+    type=click.STRING,
+    default="127.0.0.1",
+)
+
 indirect_selection = click.option(
     "--indirect-selection",
     envvar="DBT_INDIRECT_SELECTION",

@@ -463,6 +487,13 @@ inline = click.option(
     help="Pass SQL inline to dbt compile and show",
 )

+inline_direct = click.option(
+    "--inline-direct",
+    envvar=None,
+    help="Internal flag to pass SQL inline to dbt show. Do not load the entire project or apply templating.",
+    hidden=True,
+)
+
 # `--select` and `--models` are analogous for most commands except `dbt list` for legacy reasons.
 # Most CLI arguments should use the combined `select` option that aliases `--models` to `--select`.
 # However, if you need to split out these separators (like `dbt ls`), use the `models` and `raw_select` options instead.

@@ -704,3 +735,10 @@ show_resource_report = click.option(
     envvar="DBT_SHOW_RESOURCE_REPORT",
     hidden=True,
 )
+
+use_fast_test_edges = click.option(
+    "--use-fast-test-edges/--no-use-fast-test-edges",
+    envvar="DBT_USE_FAST_TEST_EDGES",
+    default=False,
+    hidden=True,
+)
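The `use_fast_test_edges` declaration uses click's slash-separated pair, which defines a single boolean option with an explicit off switch. A self-contained demonstration of the pattern:

```python
# Standalone sketch of the "--flag/--no-flag" pattern used by use_fast_test_edges
# above: click treats the slash-separated pair as one boolean option. Not dbt code.
import click


@click.command()
@click.option("--use-fast-test-edges/--no-use-fast-test-edges", default=False, hidden=True)
def cli(use_fast_test_edges: bool) -> None:
    click.echo(f"fast test edges: {use_fast_test_edges}")


if __name__ == "__main__":
    cli()  # `python demo.py --use-fast-test-edges` prints "fast test edges: True"
```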
@@ -41,7 +41,13 @@ from dbt_common.events.functions import LOG_VERSION, fire_event
 from dbt_common.events.helpers import get_json_string_utcnow
 from dbt_common.exceptions import DbtBaseException as DbtException
 from dbt_common.invocation import reset_invocation_id
-from dbt_common.record import Recorder, RecorderMode, get_record_mode_from_env
+from dbt_common.record import (
+    Recorder,
+    RecorderMode,
+    get_record_mode_from_env,
+    get_record_types_from_dict,
+    get_record_types_from_env,
+)
 from dbt_common.utils import cast_dict_to_dict_of_strings

@@ -101,13 +107,23 @@ def preflight(func):

 def setup_record_replay():
     rec_mode = get_record_mode_from_env()
+    rec_types = get_record_types_from_env()

     recorder: Optional[Recorder] = None
     if rec_mode == RecorderMode.REPLAY:
-        recording_path = os.environ["DBT_REPLAY"]
-        recorder = Recorder(RecorderMode.REPLAY, recording_path)
+        previous_recording_path = os.environ.get("DBT_RECORDER_FILE_PATH")
+        recorder = Recorder(
+            RecorderMode.REPLAY, types=rec_types, previous_recording_path=previous_recording_path
+        )
+    elif rec_mode == RecorderMode.DIFF:
+        previous_recording_path = os.environ.get("DBT_RECORDER_FILE_PATH")
+        # ensure types match the previous recording
+        types = get_record_types_from_dict(previous_recording_path)
+        recorder = Recorder(
+            RecorderMode.DIFF, types=types, previous_recording_path=previous_recording_path
+        )
     elif rec_mode == RecorderMode.RECORD:
-        recorder = Recorder(RecorderMode.RECORD)
+        recorder = Recorder(RecorderMode.RECORD, types=rec_types)

     get_invocation_context().recorder = recorder

@@ -116,7 +132,10 @@ def tear_down_record_replay():
     recorder = get_invocation_context().recorder
     if recorder is not None:
         if recorder.mode == RecorderMode.RECORD:
-            recorder.write("recording.json")
+            recorder.write()
+        if recorder.mode == RecorderMode.DIFF:
+            recorder.write()
+            recorder.write_diffs(diff_file_name="recording_diffs.json")
         elif recorder.mode == RecorderMode.REPLAY:
             recorder.write_diffs("replay_diffs.json")
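These hunks add a third recorder mode, DIFF, alongside RECORD and REPLAY: it records the current run and then diffs it against a previous recording located via `DBT_RECORDER_FILE_PATH`. Only `get_record_mode_from_env()` and `DBT_RECORDER_FILE_PATH` appear in the diff itself; the mode variable name below, `DBT_RECORDER_MODE`, is an assumption. A plausible driver script:

```python
# Hypothetical driver for the record/diff workflow inferred from the hunks above.
# DBT_RECORDER_MODE is an assumed variable name; only DBT_RECORDER_FILE_PATH and
# get_record_mode_from_env() are shown in the diff.
import os
import subprocess

# 1. Record a run; tear_down_record_replay() calls recorder.write() on exit.
env = dict(os.environ, DBT_RECORDER_MODE="record")
subprocess.run(["dbt", "parse"], env=env, check=True)

# 2. Diff a later run against the recording captured above.
env = dict(
    os.environ,
    DBT_RECORDER_MODE="diff",
    DBT_RECORDER_FILE_PATH="recording.json",  # path assumed; produced by the record step
)
subprocess.run(["dbt", "parse"], env=env, check=True)  # writes recording_diffs.json
```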
@@ -160,9 +179,11 @@ def postflight(func):
                     process_in_blocks=rusage.ru_inblock,
                     process_out_blocks=rusage.ru_oublock,
                 ),
-                EventLevel.INFO
-                if "flags" in ctx.obj and ctx.obj["flags"].SHOW_RESOURCE_REPORT
-                else None,
+                (
+                    EventLevel.INFO
+                    if "flags" in ctx.obj and ctx.obj["flags"].SHOW_RESOURCE_REPORT
+                    else None
+                ),
             )

             fire_event(
@@ -303,6 +324,20 @@ def manifest(*args0, write=True, write_perf_info=False):
             ctx = args[0]
             assert isinstance(ctx, Context)

+            setup_manifest(ctx, write=write, write_perf_info=write_perf_info)
+            return func(*args, **kwargs)
+
+        return update_wrapper(wrapper, func)
+
+    # if there are no args, the decorator was used without params @decorator
+    # otherwise, the decorator was called with params @decorator(arg)
+    if len(args0) == 0:
+        return outer_wrapper
+    return outer_wrapper(args0[0])
+
+
+def setup_manifest(ctx: Context, write: bool = True, write_perf_info: bool = False):
+    """Load the manifest and add it to the context."""
     req_strs = ["profile", "project", "runtime_config"]
     reqs = [ctx.obj.get(dep) for dep in req_strs]

@@ -319,18 +354,7 @@ def manifest(*args0, write=True, write_perf_info=False):
     else:
         register_adapter(runtime_config, get_mp_context())
         adapter = get_adapter(runtime_config)
-        adapter.set_macro_context_generator(generate_runtime_macro_context)
+        adapter.set_macro_context_generator(generate_runtime_macro_context)  # type: ignore[arg-type]
         adapter.set_macro_resolver(ctx.obj["manifest"])
-        query_header_context = generate_query_header_context(
-            adapter.config, ctx.obj["manifest"]
-        )
+        query_header_context = generate_query_header_context(adapter.config, ctx.obj["manifest"])  # type: ignore[attr-defined]
         adapter.connections.set_query_header(query_header_context)
-        return func(*args, **kwargs)
-
-    return update_wrapper(wrapper, func)
-
-# if there are no args, the decorator was used without params @decorator
-# otherwise, the decorator was called with params @decorator(arg)
-if len(args0) == 0:
-    return outer_wrapper
-return outer_wrapper(args0[0])
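The refactor pulls the manifest-loading body out of the decorator into a reusable `setup_manifest()` (which the `show` command calls directly in its non-inline branch), while the decorator keeps the `len(args0) == 0` dance: the standard trick for letting one decorator work both bare (`@manifest`) and parameterized (`@manifest(write=False)`). A self-contained sketch of that pattern, with illustrative names rather than dbt's:

```python
# Self-contained sketch of the bare-or-parameterized decorator pattern used by
# @requires.manifest above. Names are illustrative, not dbt's.
from functools import update_wrapper


def manifest(*args0, write=True):
    def outer_wrapper(func):
        def wrapper(*args, **kwargs):
            print(f"loading manifest (write={write})")
            return func(*args, **kwargs)

        return update_wrapper(wrapper, func)

    # if there are no args, the decorator was used without params: @manifest
    # otherwise it was called with params: @manifest(write=False)
    if len(args0) == 0:
        return outer_wrapper
    return outer_wrapper(args0[0])


@manifest  # bare form: the decorated function itself arrives in args0
def run():
    print("running")


@manifest(write=False)  # parameterized form: args0 is empty
def build():
    print("building")


run()    # loading manifest (write=True) / running
build()  # loading manifest (write=False) / building
```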
@@ -1,29 +1,50 @@
-from typing import Any, Dict, Optional
+import typing
+from typing import Any, Dict, List, Optional, Union

 import jinja2

-from dbt.exceptions import MacroNamespaceNotStringError
+from dbt.artifacts.resources import RefArgs
+from dbt.exceptions import MacroNamespaceNotStringError, ParsingError
 from dbt_common.clients.jinja import get_environment
 from dbt_common.exceptions.macros import MacroNameNotStringError
 from dbt_common.tests import test_caching_enabled
+from dbt_extractor import ExtractionError, py_extract_from_source  # type: ignore

-_TESTING_MACRO_CACHE: Optional[Dict[str, Any]] = {}
+if typing.TYPE_CHECKING:
+    from dbt.context.providers import ParseDatabaseWrapper


-def statically_extract_macro_calls(string, ctx, db_wrapper=None):
+_TESTING_MACRO_CACHE: Dict[str, Any] = {}


+def statically_extract_has_name_this(source: str) -> bool:
+    """Checks whether the raw jinja has any references to `this`"""
+    env = get_environment(None, capture_macros=True)
+    parsed = env.parse(source)
+    names = tuple(parsed.find_all(jinja2.nodes.Name))
+
+    for name in names:
+        if hasattr(name, "name") and name.name == "this":
+            return True
+    return False
+
+
+def statically_extract_macro_calls(
+    source: str, ctx: Dict[str, Any], db_wrapper: Optional["ParseDatabaseWrapper"] = None
+) -> List[str]:
     # set 'capture_macros' to capture undefined
     env = get_environment(None, capture_macros=True)

     global _TESTING_MACRO_CACHE
-    if test_caching_enabled() and string in _TESTING_MACRO_CACHE:
-        parsed = _TESTING_MACRO_CACHE.get(string, None)
+    if test_caching_enabled() and source in _TESTING_MACRO_CACHE:
+        parsed = _TESTING_MACRO_CACHE.get(source, None)
         func_calls = getattr(parsed, "_dbt_cached_calls")
     else:
-        parsed = env.parse(string)
+        parsed = env.parse(source)
         func_calls = tuple(parsed.find_all(jinja2.nodes.Call))

         if test_caching_enabled():
-            _TESTING_MACRO_CACHE[string] = parsed
+            _TESTING_MACRO_CACHE[source] = parsed
             setattr(parsed, "_dbt_cached_calls", func_calls)

     standard_calls = ["source", "ref", "config"]
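The new `statically_extract_has_name_this` walks the parsed jinja AST instead of rendering the template. The same `find_all` technique works with stock jinja2, so the behavior can be sanity-checked without dbt (a sketch; dbt's `get_environment` adds extensions, but a plain `jinja2.Environment` suffices for `Name` nodes):

```python
# Sanity check of the Name-node scan using plain jinja2, mirroring
# statically_extract_has_name_this above without dbt's environment wrapper.
import jinja2


def has_name_this(source: str) -> bool:
    parsed = jinja2.Environment().parse(source)
    return any(
        getattr(name, "name", None) == "this"
        for name in parsed.find_all(jinja2.nodes.Name)
    )


print(has_name_this("select * from {{ this }}"))             # True
print(has_name_this("select * from {{ ref('my_model') }}"))  # False
```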
@@ -67,30 +88,9 @@ def statically_extract_macro_calls(string, ctx, db_wrapper=None):
     return possible_macro_calls


-# Call(
-#     node=Getattr(
-#         node=Name(
-#             name='adapter',
-#             ctx='load'
-#         ),
-#         attr='dispatch',
-#         ctx='load'
-#     ),
-#     args=[
-#         Const(value='test_pkg_and_dispatch')
-#     ],
-#     kwargs=[
-#         Keyword(
-#             key='packages',
-#             value=Call(node=Getattr(node=Name(name='local_utils', ctx='load'),
-#                 attr='_get_utils_namespaces', ctx='load'), args=[], kwargs=[],
-#                 dyn_args=None, dyn_kwargs=None)
-#         )
-#     ],
-#     dyn_args=None,
-#     dyn_kwargs=None
-# )
-def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):
+def statically_parse_adapter_dispatch(
+    func_call, ctx: Dict[str, Any], db_wrapper: Optional["ParseDatabaseWrapper"]
+) -> List[str]:
     possible_macro_calls = []
     # This captures an adapter.dispatch('<macro_name>') call.

@@ -142,7 +142,7 @@ def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):

     if db_wrapper:
         macro = db_wrapper.dispatch(func_name, macro_namespace=macro_namespace).macro
-        func_name = f"{macro.package_name}.{macro.name}"
+        func_name = f"{macro.package_name}.{macro.name}"  # type: ignore[attr-defined]
         possible_macro_calls.append(func_name)
     else:  # this is only for tests/unit/test_macro_calls.py
         if macro_namespace:
@@ -153,3 +153,93 @@ def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):
             possible_macro_calls.append(f"{package_name}.{func_name}")

     return possible_macro_calls
+
+
+def statically_parse_ref_or_source(expression: str) -> Union[RefArgs, List[str]]:
+    """
+    Returns a RefArgs or List[str] object, corresponding to ref or source respectively, given an input jinja expression.
+
+    input: str representing how input node is referenced in tested model sql
+      * examples:
+        - "ref('my_model_a')"
+        - "ref('my_model_a', version=3)"
+        - "ref('package', 'my_model_a', version=3)"
+        - "source('my_source_schema', 'my_source_name')"
+
+    If input is not a well-formed jinja ref or source expression, a ParsingError is raised.
+    """
+    ref_or_source: Union[RefArgs, List[str]]
+
+    try:
+        statically_parsed = py_extract_from_source(f"{{{{ {expression} }}}}")
+    except ExtractionError:
+        raise ParsingError(f"Invalid jinja expression: {expression}")
+
+    if statically_parsed.get("refs"):
+        raw_ref = list(statically_parsed["refs"])[0]
+        ref_or_source = RefArgs(
+            package=raw_ref.get("package"),
+            name=raw_ref.get("name"),
+            version=raw_ref.get("version"),
+        )
+    elif statically_parsed.get("sources"):
+        source_name, source_table_name = list(statically_parsed["sources"])[0]
+        ref_or_source = [source_name, source_table_name]
+    else:
+        raise ParsingError(f"Invalid ref or source expression: {expression}")
+
+    return ref_or_source
+
+
+def statically_parse_unrendered_config(string: str) -> Optional[Dict[str, Any]]:
+    """
+    Given a string with jinja, extract an unrendered config call.
+    If no config call is present, returns None.
+
+    For example, given:
+    "{{ config(materialized=env_var('DBT_TEST_STATE_MODIFIED')) }}\nselect 1 as id"
+    returns: {'materialized': "Keyword(key='materialized', value=Call(node=Name(name='env_var', ctx='load'), args=[Const(value='DBT_TEST_STATE_MODIFIED')], kwargs=[], dyn_args=None, dyn_kwargs=None))"}
+
+    No config call:
+    "select 1 as id"
+    returns: None
+    """
+    # Return early to avoid creating jinja environment if no config call in input string
+    if "config(" not in string:
+        return None
+
+    # set 'capture_macros' to capture undefined
+    env = get_environment(None, capture_macros=True)
+
+    global _TESTING_MACRO_CACHE
+    if test_caching_enabled() and _TESTING_MACRO_CACHE and string in _TESTING_MACRO_CACHE:
+        parsed = _TESTING_MACRO_CACHE.get(string, None)
+        func_calls = getattr(parsed, "_dbt_cached_calls")
+    else:
+        parsed = env.parse(string)
+        func_calls = tuple(parsed.find_all(jinja2.nodes.Call))
+
+    config_func_calls = list(
+        filter(
+            lambda f: hasattr(f, "node") and hasattr(f.node, "name") and f.node.name == "config",
+            func_calls,
+        )
+    )
+    # There should only be one {{ config(...) }} call per input
+    config_func_call = config_func_calls[0] if config_func_calls else None
+
+    if not config_func_call:
+        return None
+
+    unrendered_config = {}
+    for kwarg in config_func_call.kwargs:
+        unrendered_config[kwarg.key] = construct_static_kwarg_value(kwarg)
+
+    return unrendered_config
+
+
+def construct_static_kwarg_value(kwarg) -> str:
+    # Instead of trying to re-assemble complex kwarg value, simply stringify the value.
+    # This is still useful to be able to detect changes in unrendered configs, even if it is
+    # not an exact representation of the user input.
+    return str(kwarg)
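Both new helpers are pure functions over strings, so their contract (taken from the docstrings above) is easy to exercise in isolation. A sketch, assuming a dbt-core install that exposes them at this path (the module in this diff appears to be `dbt.clients.jinja_static`):

```python
# Exercising the two new helpers; expected outputs follow the docstrings above.
from dbt.clients.jinja_static import (
    statically_parse_ref_or_source,
    statically_parse_unrendered_config,
)

print(statically_parse_ref_or_source("ref('my_model_a', version=3)"))
# a RefArgs with name='my_model_a', package=None, version=3

print(statically_parse_ref_or_source("source('my_source_schema', 'my_source_name')"))
# ['my_source_schema', 'my_source_name']

sql = "{{ config(materialized=env_var('DBT_TEST_STATE_MODIFIED')) }}\nselect 1 as id"
print(statically_parse_unrendered_config(sql))
# {'materialized': "Keyword(key='materialized', value=Call(...))"}

print(statically_parse_unrendered_config("select 1 as id"))  # None
```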
@@ -1,8 +1,9 @@
+import dataclasses
 import json
 import os
 import pickle
-from collections import defaultdict
-from typing import Any, Dict, List, Optional, Tuple
+from collections import defaultdict, deque
+from typing import Any, Dict, Iterable, List, Optional, Set, Tuple

 import networkx as nx  # type: ignore
 import sqlparse

@@ -21,6 +22,7 @@ from dbt.contracts.graph.nodes import (
     InjectedCTE,
     ManifestNode,
     ManifestSQLNode,
+    ModelNode,
     SeedNode,
     UnitTestDefinition,
     UnitTestNode,

@@ -29,12 +31,15 @@ from dbt.events.types import FoundStats, WritingInjectedSQLForNode
 from dbt.exceptions import (
     DbtInternalError,
     DbtRuntimeError,
+    ForeignKeyConstraintToSyntaxError,
     GraphDependencyNotFoundError,
+    ParsingError,
 )
 from dbt.flags import get_flags
 from dbt.graph import Graph
 from dbt.node_types import ModelLanguage, NodeType
 from dbt_common.clients.system import make_directory
+from dbt_common.contracts.constraints import ConstraintType
 from dbt_common.events.contextvars import get_node_info
 from dbt_common.events.format import pluralize
 from dbt_common.events.functions import fire_event

@@ -113,6 +118,16 @@ def _get_tests_for_node(manifest: Manifest, unique_id: UniqueID) -> List[UniqueID]:
     return tests


+@dataclasses.dataclass
+class SeenDetails:
+    node_id: UniqueID
+    visits: int = 0
+    ancestors: Set[UniqueID] = dataclasses.field(default_factory=set)
+    awaits_tests: Set[Tuple[UniqueID, Tuple[UniqueID, ...]]] = dataclasses.field(
+        default_factory=set
+    )
+
+
 class Linker:
     def __init__(self, data=None) -> None:
         if data is None:
@@ -191,19 +206,62 @@ class Linker:
             raise RuntimeError("Found a cycle: {}".format(cycle))

     def add_test_edges(self, manifest: Manifest) -> None:
+        if not get_flags().USE_FAST_TEST_EDGES:
+            self.add_test_edges_1(manifest)
+        else:
+            self.add_test_edges_2(manifest)
+
+    def add_test_edges_1(self, manifest: Manifest) -> None:
         """This method adds additional edges to the DAG. For a given non-test
         executable node, add an edge from an upstream test to the given node if
         the set of nodes the test depends on is a subset of the upstream nodes
         for the given node."""

-        # Given a graph:
+        # HISTORICAL NOTE: To understand the motivation behind this function,
+        # consider a node A with tests and a node B which depends (either directly
+        # or indirectly) on A. It would be nice if B were not executed until
+        # all of the tests on A are finished. After all, we don't want to
+        # propagate bad data. We can enforce that behavior by adding new
+        # dependencies (edges) from tests to nodes that should wait on them.
+        #
+        # This function implements a rough approximation of the behavior just
+        # described. In fact, for tests that only depend on a single node, it
+        # always works.
+        #
+        # Things get trickier for tests that depend on multiple nodes. In that
+        # case, if we are not careful, we will introduce cycles. That seems to
+        # be the reason this function adds dependencies from a downstream node to
+        # an upstream test if and only if the downstream node is already a
+        # descendant of all the nodes the upstream test depends on. By following
+        # that rule, it never makes the node dependent on new upstream nodes other
+        # than the tests themselves, and no cycles will be created.
+        #
+        # One drawback (Drawback 1) of the approach taken in this function is
+        # that it could still allow a downstream node to proceed before all
+        # testing is done on its ancestors, if it happens to have ancestors that
+        # are not also ancestors of a test with multiple dependencies.
+        #
+        # Another drawback (Drawback 2) is that the approach below adds far more
+        # edges than are strictly needed. After all, if we have A -> B -> C,
+        # there is no need to add a new edge A -> C. But this function often does.
+        #
+        # Drawback 2 is resolved in the new add_test_edges_2() implementation
+        # below, which is also typically much faster. Drawback 1 has been left in
+        # place in order to conservatively retain existing behavior, and so that
+        # the new implementation can be verified against this existing
+        # implementation by ensuring both resulting graphs have the same transitive
+        # reduction.
+
+        # MOTIVATING IDEA: Given a graph...
+        #
         # model1 --> model2 --> model3
         #   |          |
         #   |          \/
         #  \/        test 2
         # test1
         #
-        # Produce the following graph:
+        # ...produce the following...
+        #
         # model1 --> model2 --> model3
         #   |  /\      |  /\     /\
         #   |   |      \/  |      |
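The subset rule described above is easy to restate with networkx primitives: a node waits on an upstream test only when every dependency of that test is already among the node's ancestors. A toy reproduction (simplified; the real method also filters by resource type and skips tests themselves):

```python
# Toy reproduction of the subset rule in add_test_edges_1, using networkx directly.
import networkx as nx

g = nx.DiGraph()
g.add_edges_from([("model1", "model2"), ("model2", "model3")])
g.add_edge("model1", "test1")                                  # test1 depends on model1
g.add_edges_from([("model1", "test2"), ("model2", "test2")])   # test2 depends on both

tests = {"test1": {"model1"}, "test2": {"model1", "model2"}}

for node in ["model2", "model3"]:
    upstream = nx.ancestors(g, node)
    for test, deps in tests.items():
        if deps.issubset(upstream):
            g.add_edge(test, node, edge_type="parent_test")

print(sorted(e for e in g.edges if e[0].startswith("test")))
# [('test1', 'model2'), ('test1', 'model3'), ('test2', 'model3')]
```

Note the redundant `('test1', 'model3')` edge: exactly the Drawback 2 the comments call out, since `test1 -> model2 -> model3` already implies it.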
@@ -243,6 +301,139 @@ class Linker:
             if test_depends_on.issubset(upstream_nodes):
                 self.graph.add_edge(upstream_test, node_id, edge_type="parent_test")

+    def add_test_edges_2(self, manifest: Manifest):
+        graph = self.graph
+        new_edges = self._get_test_edges_2(graph, manifest)
+        for e in new_edges:
+            graph.add_edge(e[0], e[1], edge_type="parent_test")
+
+    @staticmethod
+    def _get_test_edges_2(
+        graph: nx.DiGraph, manifest: Manifest
+    ) -> Iterable[Tuple[UniqueID, UniqueID]]:
+        # This function enforces the same execution behavior as add_test_edges,
+        # but executes far more quickly and adds far fewer edges. See the
+        # HISTORICAL NOTE above.
+        #
+        # The idea is to first scan for "single-tested" nodes (which have tests
+        # that depend only upon on that node) and "multi-tested" nodes (which
+        # have tests that depend on multiple nodes). Single-tested nodes are
+        # handled quickly and easily.
+        #
+        # The less common but more complex case of multi-tested nodes is handled
+        # by a specialized function.
+
+        new_edges: List[Tuple[UniqueID, UniqueID]] = []
+
+        source_nodes: List[UniqueID] = []
+        executable_nodes: Set[UniqueID] = set()
+        multi_tested_nodes = set()
+        # Dictionary mapping nodes with single-dep tests to a list of those tests.
+        single_tested_nodes: dict[UniqueID, List[UniqueID]] = defaultdict(list)
+        for node_id in graph.nodes:
+            manifest_node = manifest.nodes.get(node_id, None)
+            if manifest_node is None:
+                continue
+
+            if next(graph.predecessors(node_id), None) is None:
+                source_nodes.append(node_id)
+
+            if manifest_node.resource_type != NodeType.Test:
+                executable_nodes.add(node_id)
+            else:
+                test_deps = manifest_node.depends_on_nodes
+                if len(test_deps) == 1:
+                    single_tested_nodes[test_deps[0]].append(node_id)
+                elif len(test_deps) > 1:
+                    multi_tested_nodes.update(manifest_node.depends_on_nodes)
+
+        # Now that we have all the necessary information conveniently organized,
+        # add new edges for single-tested nodes.
+        for node_id, test_ids in single_tested_nodes.items():
+            succs = [s for s in graph.successors(node_id) if s in executable_nodes]
+            for succ_id in succs:
+                for test_id in test_ids:
+                    new_edges.append((test_id, succ_id))
+
+        # Get the edges for multi-tested nodes separately, if needed.
+        if len(multi_tested_nodes) > 0:
+            multi_test_edges = Linker._get_multi_test_edges(
+                graph, manifest, source_nodes, executable_nodes, multi_tested_nodes
+            )
+            new_edges += multi_test_edges
+
+        return new_edges
+
+    @staticmethod
+    def _get_multi_test_edges(
+        graph: nx.DiGraph,
+        manifest: Manifest,
+        source_nodes: Iterable[UniqueID],
+        executable_nodes: Set[UniqueID],
+        multi_tested_nodes,
+    ) -> List[Tuple[UniqueID, UniqueID]]:
+        # Works through the graph in a breadth-first style, processing nodes from
+        # a ready queue which initially consists of nodes with no ancestors,
+        # and adding more nodes to the ready queue after all their ancestors
+        # have been processed. All the while, the relevant details of all nodes
+        # "seen" by the search so far are maintained in a SeenDetails record,
+        # including the ancestor set which tests it is "awaiting" (i.e. tests of
+        # its ancestors). The processing step adds test edges when every dependency
+        # of an awaited test is an ancestor of a node that is being processed.
+        # Downstream nodes are then exempted from awaiting the test.
+        #
+        # Memory consumption is potentially O(n^2) with n the number of nodes in
+        # the graph, since the average number of ancestors and tests being awaited
+        # for each of the n nodes could itself be O(n) but we only track ancestors
+        # that are multi-tested, which should keep things closer to O(n) in real-
+        # world scenarios.
+
+        new_edges: List[Tuple[UniqueID, UniqueID]] = []
+        ready: deque = deque(source_nodes)
+        details = {node_id: SeenDetails(node_id) for node_id in source_nodes}
+
+        while len(ready) > 0:
+            curr_details: SeenDetails = details[ready.pop()]
+            test_ids = _get_tests_for_node(manifest, curr_details.node_id)
+            new_awaits_for_succs = curr_details.awaits_tests.copy()
+            for test_id in test_ids:
+                deps: List[UniqueID] = sorted(manifest.nodes[test_id].depends_on_nodes)
+                if len(deps) > 1:
+                    # Tests with only one dep were already handled.
+                    new_awaits_for_succs.add((test_id, tuple(deps)))
+
+            for succ_id in [
+                s for s in graph.successors(curr_details.node_id) if s in executable_nodes
+            ]:
+                suc_details = details.get(succ_id, None)
+                if suc_details is None:
+                    suc_details = SeenDetails(succ_id)
+                    details[succ_id] = suc_details
+                suc_details.visits += 1
+                suc_details.awaits_tests.update(new_awaits_for_succs)
+                suc_details.ancestors.update(curr_details.ancestors)
+                if curr_details.node_id in multi_tested_nodes:
+                    # Only track ancestry information for the set of nodes
+                    # we will actually check against later.
+                    suc_details.ancestors.add(curr_details.node_id)
+
+                if suc_details.visits == graph.in_degree(succ_id):
+                    if len(suc_details.awaits_tests) > 0:
+                        removes = set()
+                        for awt in suc_details.awaits_tests:
+                            if not any(True for a in awt[1] if a not in suc_details.ancestors):
+                                removes.add(awt)
+                                new_edges.append((awt[0], succ_id))
+
+                        suc_details.awaits_tests.difference_update(removes)
+                    ready.appendleft(succ_id)
+
+            # We are now done with the current node and all of its ancestors.
+            # Discard its details to save memory.
+            del details[curr_details.node_id]
+
+        return new_edges
+
     def get_graph(self, manifest: Manifest) -> Graph:
         self.link_graph(manifest)
         return Graph(self.graph)
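The comments above say the fast implementation was validated by checking that both algorithms yield graphs with the same transitive reduction. That check itself is nearly a one-liner with networkx (a sketch; assumes both inputs are DAGs, which `nx.transitive_reduction` requires):

```python
# Sketch of the equivalence check mentioned above: two DAGs prescribe the same
# execution ordering iff their transitive reductions match.
import networkx as nx


def same_ordering(g1: nx.DiGraph, g2: nx.DiGraph) -> bool:
    tr1 = nx.transitive_reduction(g1)
    tr2 = nx.transitive_reduction(g2)
    return set(tr1.nodes) == set(tr2.nodes) and set(tr1.edges) == set(tr2.edges)


# A -> C directly and via B: the direct edge is redundant for ordering purposes.
g1 = nx.DiGraph([("A", "B"), ("B", "C"), ("A", "C")])
g2 = nx.DiGraph([("A", "B"), ("B", "C")])
print(same_ordering(g1, g2))  # True
```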
@@ -371,7 +562,7 @@ class Compiler:

             _extend_prepended_ctes(prepended_ctes, new_prepended_ctes)

-            new_cte_name = self.add_ephemeral_prefix(cte_model.name)
+            new_cte_name = self.add_ephemeral_prefix(cte_model.identifier)
             rendered_sql = cte_model._pre_injected_sql or cte_model.compiled_code
             sql = f" {new_cte_name} as (\n{rendered_sql}\n)"
@@ -437,8 +628,31 @@ class Compiler:
         relation_name = str(relation_cls.create_from(self.config, node))
         node.relation_name = relation_name

+        # Compile 'ref' and 'source' expressions in foreign key constraints
+        if isinstance(node, ModelNode):
+            for constraint in node.all_constraints:
+                if constraint.type == ConstraintType.foreign_key and constraint.to:
+                    constraint.to = self._compile_relation_for_foreign_key_constraint_to(
+                        manifest, node, constraint.to
+                    )
+
         return node

+    def _compile_relation_for_foreign_key_constraint_to(
+        self, manifest: Manifest, node: ManifestSQLNode, to_expression: str
+    ) -> str:
+        try:
+            foreign_key_node = manifest.find_node_from_ref_or_source(to_expression)
+        except ParsingError:
+            raise ForeignKeyConstraintToSyntaxError(node, to_expression)
+
+        if not foreign_key_node:
+            raise GraphDependencyNotFoundError(node, to_expression)
+
+        adapter = get_adapter(self.config)
+        relation_name = str(adapter.Relation.create_from(self.config, foreign_key_node))
+        return relation_name
+
     # This method doesn't actually "compile" any of the nodes. That is done by the
     # "compile_node" method. This creates a Linker and builds the networkx graph,
     # writes out the graph.gpickle file, and prints the stats, returning a Graph object.
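The `to:` value of a `foreign_key` constraint is a jinja ref/source expression; `manifest.find_node_from_ref_or_source` presumably routes it through the static parser added earlier in this diff before the compiler swaps in a rendered relation name. Exercising just the parsing step (a sketch; assumes dbt-core is importable and `ref('customers')` is an example constraint value, not anything from this diff):

```python
# Parsing an example foreign-key `to:` expression with the static parser added
# earlier in this diff. The constraint value here is illustrative.
from dbt.clients.jinja_static import statically_parse_ref_or_source

to_expression = "ref('customers')"  # example `to:` value from a model's schema.yml
parsed = statically_parse_ref_or_source(to_expression)
print(parsed)
# a RefArgs naming 'customers' -- which the compiler method above then looks up in
# the manifest and renders as an adapter-quoted relation name for the constraint.
```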
@@ -494,7 +708,9 @@ class Compiler:
         linker.write_graph(graph_path, manifest)

     # writes the "compiled_code" into the target/compiled directory
-    def _write_node(self, node: ManifestSQLNode) -> ManifestSQLNode:
+    def _write_node(
+        self, node: ManifestSQLNode, split_suffix: Optional[str] = None
+    ) -> ManifestSQLNode:
         if not node.extra_ctes_injected or node.resource_type in (
             NodeType.Snapshot,
             NodeType.Seed,

@@ -503,7 +719,9 @@ class Compiler:
         fire_event(WritingInjectedSQLForNode(node_info=get_node_info()))

         if node.compiled_code:
-            node.compiled_path = node.get_target_write_path(self.config.target_path, "compiled")
+            node.compiled_path = node.get_target_write_path(
+                self.config.target_path, "compiled", split_suffix
+            )
             node.write_node(self.config.project_root, node.compiled_path, node.compiled_code)
         return node

@@ -513,6 +731,7 @@ class Compiler:
         manifest: Manifest,
         extra_context: Optional[Dict[str, Any]] = None,
         write: bool = True,
+        split_suffix: Optional[str] = None,
     ) -> ManifestSQLNode:
         """This is the main entry point into this code. It's called by
         CompileRunner.compile, GenericRPCRunner.compile, and

@@ -520,6 +739,8 @@ class Compiler:
         the node's raw_code into compiled_code, and then calls the
         recursive method to "prepend" the ctes.
         """
+        # REVIEW: UnitTestDefinition shouldn't be possible here because of the
+        # type of node, and it is likewise an invalid return type.
         if isinstance(node, UnitTestDefinition):
             return node

@@ -533,7 +754,7 @@ class Compiler:

         node, _ = self._recursively_prepend_ctes(node, manifest, extra_context)
         if write:
-            self._write_node(node)
+            self._write_node(node, split_suffix=split_suffix)
         return node
@@ -10,7 +10,7 @@ from dbt import deprecations
 from dbt.adapters.contracts.connection import QueryComment
 from dbt.clients.yaml_helper import load_yaml_text
 from dbt.config.selectors import SelectorDict
-from dbt.config.utils import exclusive_primary_alt_value_setting
+from dbt.config.utils import normalize_warn_error_options
 from dbt.constants import (
     DBT_PROJECT_FILE_NAME,
     DEPENDENCIES_FILE_NAME,

@@ -158,14 +158,8 @@ def _parse_versions(versions: Union[List[str], str]) -> List[VersionSpecifier]:
     return [VersionSpecifier.from_version_string(v) for v in versions]


-def _all_source_paths(
-    model_paths: List[str],
-    seed_paths: List[str],
-    snapshot_paths: List[str],
-    analysis_paths: List[str],
-    macro_paths: List[str],
-) -> List[str]:
-    paths = chain(model_paths, seed_paths, snapshot_paths, analysis_paths, macro_paths)
+def _all_source_paths(*args: List[str]) -> List[str]:
+    paths = chain(*args)
     # Strip trailing slashes since the path is the same even though the name is not
     stripped_paths = map(lambda s: s.rstrip("/"), paths)
     return list(set(stripped_paths))

@@ -205,6 +199,9 @@ def load_raw_project(project_root: str) -> Dict[str, Any]:
     if not isinstance(project_dict, dict):
         raise DbtProjectError(f"{DBT_PROJECT_FILE_NAME} does not parse to a dictionary")

+    if "tests" in project_dict and "data_tests" not in project_dict:
+        project_dict["data_tests"] = project_dict.pop("tests")
+
     return project_dict
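The `load_raw_project` change quietly migrates the legacy top-level `tests:` key in dbt_project.yml to the newer `data_tests:` spelling. In dict form, exactly what the new lines do:

```python
# What the new load_raw_project() normalization does to a legacy project dict.
project_dict = {
    "name": "jaffle_shop",            # example project name
    "tests": {"+severity": "warn"},   # legacy top-level key
}

if "tests" in project_dict and "data_tests" not in project_dict:
    project_dict["data_tests"] = project_dict.pop("tests")

print(project_dict)
# {'name': 'jaffle_shop', 'data_tests': {'+severity': 'warn'}}
```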
@@ -409,7 +406,7 @@ class PartialProject(RenderComponents):
         snapshot_paths: List[str] = value_or(cfg.snapshot_paths, ["snapshots"])

         all_source_paths: List[str] = _all_source_paths(
-            model_paths, seed_paths, snapshot_paths, analysis_paths, macro_paths
+            model_paths, seed_paths, snapshot_paths, analysis_paths, macro_paths, test_paths
         )

         docs_paths: List[str] = value_or(cfg.docs_paths, all_source_paths)

@@ -480,6 +477,7 @@ class PartialProject(RenderComponents):
                 rendered.selectors_dict["selectors"]
             )
         dbt_cloud = cfg.dbt_cloud
+        flags: Dict[str, Any] = cfg.flags

         project = Project(
             project_name=name,

@@ -524,6 +522,7 @@ class PartialProject(RenderComponents):
             project_env_vars=project_env_vars,
             restrict_access=cfg.restrict_access,
             dbt_cloud=dbt_cloud,
+            flags=flags,
         )
         # sanity check - this means an internal issue
         project.validate()

@@ -568,11 +567,6 @@ class PartialProject(RenderComponents):
         ) = package_and_project_data_from_root(project_root)
         selectors_dict = selector_data_from_root(project_root)

-        if "flags" in project_dict:
-            # We don't want to include "flags" in the Project,
-            # it goes in ProjectFlags
-            project_dict.pop("flags")
-
         return cls.from_dicts(
             project_root=project_root,
             project_dict=project_dict,

@@ -645,6 +639,7 @@ class Project:
     project_env_vars: Dict[str, Any]
     restrict_access: bool
     dbt_cloud: Dict[str, Any]
+    flags: Dict[str, Any]

     @property
     def all_source_paths(self) -> List[str]:

@@ -654,6 +649,7 @@ class Project:
             self.snapshot_paths,
             self.analysis_paths,
             self.macro_paths,
+            self.test_paths,
         )

     @property

@@ -724,6 +720,7 @@ class Project:
                 "require-dbt-version": [v.to_version_string() for v in self.dbt_version],
                 "restrict-access": self.restrict_access,
                 "dbt-cloud": self.dbt_cloud,
+                "flags": self.flags,
             }
         )
         if self.query_comment:
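Taken together, these hunks stop discarding the `flags:` block from dbt_project.yml: instead of being popped out of `project_dict`, it is now carried on the `Project` as `project.flags` and round-trips through `to_project_config()`. Illustrative values:

```python
# Sketch of the flags round-trip enabled above; values are examples only.
project_dict = {
    "name": "jaffle_shop",
    "version": "1.0.0",
    "flags": {"send_anonymous_usage_stats": False},  # example project flag
}
# Before: the loader popped "flags" so it only reached ProjectFlags.
# After: Project(..., flags=project_dict["flags"]) keeps it, and
# project.to_project_config() emits it again under the "flags" key.
print(project_dict["flags"])  # {'send_anonymous_usage_stats': False}
```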
@@ -821,20 +818,15 @@ def read_project_flags(project_dir: str, profiles_dir: str) -> ProjectFlags:

     if profile_project_flags:
         # This can't use WARN_ERROR or WARN_ERROR_OPTIONS because they're in
-        # the config that we're loading. Uses special "warn" method.
-        deprecations.warn("project-flags-moved")
+        # the config that we're loading. Uses special "buffer" method and fired after flags are initialized in preflight.
+        deprecations.buffer("project-flags-moved")
         project_flags = profile_project_flags

     if project_flags is not None:
         # handle collapsing `include` and `error` as well as collapsing `exclude` and `warn`
         # for warn_error_options
-        warn_error_options = project_flags.get("warn_error_options")
-        exclusive_primary_alt_value_setting(
-            warn_error_options, "include", "error", "warn_error_options"
-        )
-        exclusive_primary_alt_value_setting(
-            warn_error_options, "exclude", "warn", "warn_error_options"
-        )
+        warn_error_options = project_flags.get("warn_error_options", {})
+        normalize_warn_error_options(warn_error_options)

         ProjectFlags.validate(project_flags)
         return ProjectFlags.from_dict(project_flags)
Some files were not shown because too many files have changed in this diff.