mirror of
https://github.com/dbt-labs/dbt-core
synced 2025-12-19 23:11:27 +00:00
Compare commits
574 Commits
v1.8.9
...
enable-pos
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
cf4384da38 | ||
|
|
71a6e53102 | ||
|
|
c4dc80dcd2 | ||
|
|
8097a34726 | ||
|
|
b66dff7278 | ||
|
|
22d21edb4b | ||
|
|
bef7928e22 | ||
|
|
c573131d91 | ||
|
|
f10d84d05e | ||
|
|
79a4c8969e | ||
|
|
9a80308fcf | ||
|
|
7a13d08376 | ||
|
|
9e9f5b8e57 | ||
|
|
9cd6a23eba | ||
|
|
e46c37cf07 | ||
|
|
df23f398a6 | ||
|
|
97df9278c0 | ||
|
|
748d352b6b | ||
|
|
bbd8fa02f1 | ||
|
|
61009f6ba7 | ||
|
|
ee7ecdc29f | ||
|
|
d74b58a137 | ||
|
|
12b04e7d2f | ||
|
|
5d56a052a7 | ||
|
|
62a8ea05a6 | ||
|
|
1219bd49aa | ||
|
|
791d1ebdcd | ||
|
|
148b9b41a5 | ||
|
|
d096a6776e | ||
|
|
8ff86d35ea | ||
|
|
087f8167ec | ||
|
|
bcb07ceb7b | ||
|
|
c559848044 | ||
|
|
3de0160b00 | ||
|
|
2c7f49a71e | ||
|
|
518c360a29 | ||
|
|
8cf51fddba | ||
|
|
8e128eee8e | ||
|
|
94b69b1578 | ||
|
|
0216e32c7f | ||
|
|
bbd078089e | ||
|
|
575bac3172 | ||
|
|
bca2211246 | ||
|
|
0015e35a1b | ||
|
|
09bce7af63 | ||
|
|
cb7c4a7dce | ||
|
|
5555a3dd25 | ||
|
|
0e30db4e82 | ||
|
|
b2ff6ab5a7 | ||
|
|
48218be274 | ||
|
|
500208c009 | ||
|
|
0162b71e94 | ||
|
|
811e4ee955 | ||
|
|
b79ec3c33b | ||
|
|
e32718e666 | ||
|
|
f6e0793d00 | ||
|
|
39cf2ec94f | ||
|
|
3caddd0b65 | ||
|
|
5dd516608b | ||
|
|
11ada88e48 | ||
|
|
b9e5c144a9 | ||
|
|
001e729664 | ||
|
|
7e10fc72d5 | ||
|
|
c170211ce3 | ||
|
|
8e800cee4c | ||
|
|
1bd81f5025 | ||
|
|
65a122b34a | ||
|
|
785304732f | ||
|
|
4693918a0f | ||
|
|
96738d5edc | ||
|
|
780544dc7f | ||
|
|
2190fa64a3 | ||
|
|
deb2f3e890 | ||
|
|
34f0190a14 | ||
|
|
7f9449660e | ||
|
|
ea172aa668 | ||
|
|
f0d3b8a51d | ||
|
|
c6afa4d0f2 | ||
|
|
98a1b6e272 | ||
|
|
d5071fa135 | ||
|
|
db284a14c1 | ||
|
|
4017802800 | ||
|
|
17a8816ee3 | ||
|
|
3bd425fdc9 | ||
|
|
db9a6e10c1 | ||
|
|
4a78a78c2b | ||
|
|
5ee5bf4129 | ||
|
|
ac445ca1fd | ||
|
|
1258728d9a | ||
|
|
15722264aa | ||
|
|
a6d4091b6b | ||
|
|
b1b3839715 | ||
|
|
963251df4e | ||
|
|
8c929c337e | ||
|
|
e949d1a6f9 | ||
|
|
538de17f78 | ||
|
|
96c9d80f83 | ||
|
|
2f842055f0 | ||
|
|
faeee357b1 | ||
|
|
bca5c4d454 | ||
|
|
b3d059e427 | ||
|
|
b783c97eff | ||
|
|
5add25db0c | ||
|
|
ad6ea20277 | ||
|
|
472b8057a9 | ||
|
|
2915c3e284 | ||
|
|
537daa8476 | ||
|
|
b48ad8282b | ||
|
|
7cab753863 | ||
|
|
19393a8080 | ||
|
|
1e61e3bfc6 | ||
|
|
a9dae5cac1 | ||
|
|
15010f1e6b | ||
|
|
2564b3d1f9 | ||
|
|
34bb3f94dd | ||
|
|
593a151220 | ||
|
|
1a251ee081 | ||
|
|
9b7cf25c33 | ||
|
|
26333f7f21 | ||
|
|
9bc7333e19 | ||
|
|
ee8884731b | ||
|
|
f1106ad61e | ||
|
|
ada5d3b82a | ||
|
|
64b58ec628 | ||
|
|
1e713db2fa | ||
|
|
6b7b1ad74b | ||
|
|
3e31117ba1 | ||
|
|
451b745aea | ||
|
|
d27232a946 | ||
|
|
b1705fb6f3 | ||
|
|
0e50851fa6 | ||
|
|
b75d5e701e | ||
|
|
f8b1a6dcd1 | ||
|
|
9010537499 | ||
|
|
56d3c9318b | ||
|
|
1fcce443ba | ||
|
|
de03d6f44f | ||
|
|
5db78ca6dd | ||
|
|
ada9e63c13 | ||
|
|
69d19eb5fc | ||
|
|
55bb3c304a | ||
|
|
693564de40 | ||
|
|
04a3df7324 | ||
|
|
31d974f5eb | ||
|
|
c1f64e216f | ||
|
|
8fa6e037d0 | ||
|
|
e1c98e8123 | ||
|
|
9955ea760a | ||
|
|
fdd0546700 | ||
|
|
45f21a7cda | ||
|
|
f250b503d5 | ||
|
|
aa42ff8986 | ||
|
|
3c2fdfe735 | ||
|
|
303c63ccc8 | ||
|
|
17ec11ad30 | ||
|
|
65598f3dc6 | ||
|
|
240a6056fb | ||
|
|
7cd8935b13 | ||
|
|
cd5d4be7ab | ||
|
|
5a23894584 | ||
|
|
70ad9319d2 | ||
|
|
8873581c5a | ||
|
|
1ffd059442 | ||
|
|
091ba5fe0b | ||
|
|
6bbcce1f1c | ||
|
|
0fff5760ff | ||
|
|
f4988c62e3 | ||
|
|
2e6d4f493d | ||
|
|
3e593600e0 | ||
|
|
87584c73b0 | ||
|
|
709bd11c71 | ||
|
|
f7f53732b2 | ||
|
|
32b8097a1f | ||
|
|
36f1143c31 | ||
|
|
cf7a465338 | ||
|
|
465aa0c2fc | ||
|
|
a0284edb6b | ||
|
|
d2bfb4e215 | ||
|
|
38443640ce | ||
|
|
86e0ad49aa | ||
|
|
972eb23d03 | ||
|
|
f56c3868cf | ||
|
|
66fc546766 | ||
|
|
c71d5f6665 | ||
|
|
6e0564a98b | ||
|
|
99827ea220 | ||
|
|
0db83d0abd | ||
|
|
98711cec75 | ||
|
|
4a8f9c181c | ||
|
|
5165716e3d | ||
|
|
65d428004a | ||
|
|
14fc39a76f | ||
|
|
8b4e2a138c | ||
|
|
a11ee322ae | ||
|
|
db8ca25da9 | ||
|
|
c264a7f2b9 | ||
|
|
da6f0a1bd7 | ||
|
|
c643a1d482 | ||
|
|
0f8f42639d | ||
|
|
ec2cf9b561 | ||
|
|
c6b7655b65 | ||
|
|
3e80ad7cc7 | ||
|
|
1efad4e68e | ||
|
|
f5e0a3b1b3 | ||
|
|
a64b5be25b | ||
|
|
b31718a31f | ||
|
|
f6d83c765c | ||
|
|
5b3b22a2e9 | ||
|
|
a9b26d03ce | ||
|
|
31cb5a9b72 | ||
|
|
e5dd4c57a6 | ||
|
|
e7a1c6c315 | ||
|
|
e355be6186 | ||
|
|
12850a36ec | ||
|
|
010411fed3 | ||
|
|
f64a4883eb | ||
|
|
2883933549 | ||
|
|
fe9c78eed8 | ||
|
|
a5ec58dab9 | ||
|
|
29a79557d5 | ||
|
|
35fc3fdda2 | ||
|
|
8931262fa2 | ||
|
|
85d31db1d4 | ||
|
|
d48476a08d | ||
|
|
02f695b423 | ||
|
|
3c95db9c00 | ||
|
|
fec20ff914 | ||
|
|
de38bc9b0d | ||
|
|
f4114130c9 | ||
|
|
e920053306 | ||
|
|
511ff8e0e9 | ||
|
|
0220941849 | ||
|
|
7594d42e02 | ||
|
|
bd08d13ddc | ||
|
|
5095e8d1e8 | ||
|
|
a1958c1193 | ||
|
|
2a4da100ff | ||
|
|
9c91ab27b1 | ||
|
|
3f56cbce5f | ||
|
|
7cca8470e0 | ||
|
|
c82ceaaf39 | ||
|
|
e2e86b788c | ||
|
|
6b747fe801 | ||
|
|
9e6facc4d1 | ||
|
|
5cd966cafa | ||
|
|
47d5d99693 | ||
|
|
359b195d23 | ||
|
|
2a64b7365f | ||
|
|
c6aeb4a291 | ||
|
|
5001e4f0e1 | ||
|
|
61648b5ed2 | ||
|
|
4aa5169212 | ||
|
|
729caf0d5e | ||
|
|
f26d82217e | ||
|
|
e264675db7 | ||
|
|
300aa09fc5 | ||
|
|
493008417c | ||
|
|
906e07c1f2 | ||
|
|
6a954e2d24 | ||
|
|
3b724acc54 | ||
|
|
b0ca1256ae | ||
|
|
9d7820c356 | ||
|
|
1fc193167d | ||
|
|
d9f96a95c1 | ||
|
|
138a2acf84 | ||
|
|
88ada4aa31 | ||
|
|
77d8e3262a | ||
|
|
94b6ae13b3 | ||
|
|
f7c4c3c9cc | ||
|
|
71a93b0cd3 | ||
|
|
7bdf27af31 | ||
|
|
e60b41d9fa | ||
|
|
2ba765d360 | ||
|
|
93e27548ce | ||
|
|
aa89740311 | ||
|
|
aa306693a5 | ||
|
|
7041e5822f | ||
|
|
a08255e4cb | ||
|
|
2cde93bf63 | ||
|
|
f29836fcf3 | ||
|
|
7f32e42230 | ||
|
|
55e0df181f | ||
|
|
588cbabe94 | ||
|
|
5f873da929 | ||
|
|
fdabe9534c | ||
|
|
c0423707b0 | ||
|
|
48d9afa677 | ||
|
|
d71f309c1e | ||
|
|
cb323ef78c | ||
|
|
22bc1c374e | ||
|
|
31881d2a3b | ||
|
|
1dcdcd2f52 | ||
|
|
3de3b827bf | ||
|
|
8a8857a85c | ||
|
|
e4d5a4e777 | ||
|
|
b414ef2cc5 | ||
|
|
57e279cc1b | ||
|
|
2eb1a5c3ea | ||
|
|
dcc9a0ca29 | ||
|
|
892c545985 | ||
|
|
a8702b8374 | ||
|
|
1592987de8 | ||
|
|
710600546a | ||
|
|
0bf38ce294 | ||
|
|
459d156e85 | ||
|
|
95c090bed0 | ||
|
|
f2222d2621 | ||
|
|
97ffc37405 | ||
|
|
bf18b59845 | ||
|
|
88e953e8aa | ||
|
|
6076cf7114 | ||
|
|
a1757934ef | ||
|
|
6c61cb7f7a | ||
|
|
4b1f1c4029 | ||
|
|
7df04b0fe4 | ||
|
|
662101590d | ||
|
|
fc6167a2ee | ||
|
|
983cbb4f28 | ||
|
|
c9582c2323 | ||
|
|
03fdb4c157 | ||
|
|
afe25a99fe | ||
|
|
e32b8a90ac | ||
|
|
1472b86ee2 | ||
|
|
ff6745c795 | ||
|
|
fdfe03d561 | ||
|
|
1b7d9b5704 | ||
|
|
c3d87b89fb | ||
|
|
0f084e16ca | ||
|
|
3464be7f70 | ||
|
|
407f6caa1c | ||
|
|
ad575ec699 | ||
|
|
f582ac2488 | ||
|
|
f5f0735d00 | ||
|
|
3abf575fa6 | ||
|
|
a42303c3af | ||
|
|
6fccfe84ea | ||
|
|
fd6ec71dab | ||
|
|
ae957599e1 | ||
|
|
f080346227 | ||
|
|
2a75dd4683 | ||
|
|
945539e3ae | ||
|
|
84230ce333 | ||
|
|
35c09203ad | ||
|
|
1625eb059a | ||
|
|
2c43af897d | ||
|
|
6e1f64f8b4 | ||
|
|
e9a2b548cb | ||
|
|
89caa33fb4 | ||
|
|
30b8a92e38 | ||
|
|
b95f7a7f2c | ||
|
|
e451a371e6 | ||
|
|
81067d4fc4 | ||
|
|
3198ce4809 | ||
|
|
0c51985c83 | ||
|
|
e26af57989 | ||
|
|
bdf28d7eff | ||
|
|
289d2dd932 | ||
|
|
8a17a0d7e7 | ||
|
|
8c6bec4fb5 | ||
|
|
7f5abdc565 | ||
|
|
f714e84282 | ||
|
|
7f92c6e003 | ||
|
|
8de0229a04 | ||
|
|
dd77210756 | ||
|
|
8df5c96f3d | ||
|
|
6b5db1796f | ||
|
|
3224589fe7 | ||
|
|
b71ceb3166 | ||
|
|
4d4b05effc | ||
|
|
316ecfca28 | ||
|
|
d07bfda9df | ||
|
|
8ae689c674 | ||
|
|
bdb79e8626 | ||
|
|
f7b7935a97 | ||
|
|
3d96b4e36c | ||
|
|
7920b0e71d | ||
|
|
a0674db840 | ||
|
|
ba6c7baf1d | ||
|
|
8be063502b | ||
|
|
78c05718c5 | ||
|
|
d18f50bbb8 | ||
|
|
ffa75ca9ff | ||
|
|
8f847167fa | ||
|
|
cd6bb9e782 | ||
|
|
ef9abe6c06 | ||
|
|
40c350ff21 | ||
|
|
c7d8693f70 | ||
|
|
6743e32574 | ||
|
|
f6cdacc61e | ||
|
|
5db0b81da1 | ||
|
|
fc8eb820aa | ||
|
|
fc83f5edfa | ||
|
|
8248d1eb53 | ||
|
|
6b9c1da1ae | ||
|
|
7940ad5c78 | ||
|
|
3ec8fa79bd | ||
|
|
396cf2d683 | ||
|
|
87b1143a62 | ||
|
|
75a09621cd | ||
|
|
5e9f1b515f | ||
|
|
25a68a990c | ||
|
|
a86e2b4ffc | ||
|
|
94917432f9 | ||
|
|
d1857b39ca | ||
|
|
2ff3f20863 | ||
|
|
5e3d418264 | ||
|
|
5d32aa8b62 | ||
|
|
d8b1bf53f7 | ||
|
|
1076352293 | ||
|
|
1fe9c1bbfe | ||
|
|
41e4836c0f | ||
|
|
b590045b9f | ||
|
|
1fd4d2eae6 | ||
|
|
ac66f91351 | ||
|
|
359a2c0cc5 | ||
|
|
bbdb98fa5d | ||
|
|
a8d4ba2b4a | ||
|
|
09e973d24a | ||
|
|
730e40a867 | ||
|
|
a1e4753020 | ||
|
|
3ac20ce7a8 | ||
|
|
aa23af98e5 | ||
|
|
46da967115 | ||
|
|
db694731c9 | ||
|
|
7016cd3085 | ||
|
|
9ca10fbfd9 | ||
|
|
3308a4365e | ||
|
|
f8bfd32ed6 | ||
|
|
3e437a6734 | ||
|
|
9e633f6178 | ||
|
|
d182d06644 | ||
|
|
054c6fde37 | ||
|
|
4c326e40b5 | ||
|
|
8fe5ea1ee7 | ||
|
|
16f5023f4d | ||
|
|
c6b8f7e595 | ||
|
|
77aeb3ea68 | ||
|
|
1e20772d33 | ||
|
|
8ce2c46a2f | ||
|
|
aeaaedcaa1 | ||
|
|
6c111f2e31 | ||
|
|
139b9ac54f | ||
|
|
cc8541c05f | ||
|
|
ab500a9709 | ||
|
|
1d3d315249 | ||
|
|
b35ad46e3f | ||
|
|
c28cb92af5 | ||
|
|
b56d96df5e | ||
|
|
37d382c8e7 | ||
|
|
9b7f4ff842 | ||
|
|
555ff8091f | ||
|
|
98fddcf54f | ||
|
|
d652359c61 | ||
|
|
f7d21e012e | ||
|
|
e1fa461186 | ||
|
|
1153597970 | ||
|
|
09f9febc25 | ||
|
|
22181409f6 | ||
|
|
f25a474f75 | ||
|
|
3c55806203 | ||
|
|
bba020fcc0 | ||
|
|
84eb0ff672 | ||
|
|
3695698e22 | ||
|
|
9ca1bc5b4c | ||
|
|
5f66678f6d | ||
|
|
63262e93cb | ||
|
|
374412af53 | ||
|
|
47848b8ea8 | ||
|
|
3d09872a56 | ||
|
|
dfa7d06526 | ||
|
|
7f57dd5a30 | ||
|
|
56bfbeaedd | ||
|
|
1dd26e79af | ||
|
|
86223609dd | ||
|
|
21a46332f1 | ||
|
|
ff2726c3b5 | ||
|
|
014444dc18 | ||
|
|
25c2042dc9 | ||
|
|
0a160fc27a | ||
|
|
c598741262 | ||
|
|
f9c2b9398f | ||
|
|
cab6dabbc7 | ||
|
|
e1621ebc54 | ||
|
|
cd90d4493c | ||
|
|
560d151dcd | ||
|
|
229c537748 | ||
|
|
79ad0a3243 | ||
|
|
c668846404 | ||
|
|
c4958de166 | ||
|
|
33161a3035 | ||
|
|
471b816dcd | ||
|
|
bef2d20c21 | ||
|
|
2a26fabfdf | ||
|
|
4c7d922a6d | ||
|
|
b03291548a | ||
|
|
a7af3b3831 | ||
|
|
6e4564ab05 | ||
|
|
1aeff2c58f | ||
|
|
601fee0d5f | ||
|
|
88b8b10df1 | ||
|
|
4ea0e1007c | ||
|
|
a309283a7c | ||
|
|
b10fa79ae8 | ||
|
|
37e2725038 | ||
|
|
37fd299ad0 | ||
|
|
a94027acea | ||
|
|
b59c9075e2 | ||
|
|
c215697a02 | ||
|
|
d936a630c1 | ||
|
|
11ee2b9c42 | ||
|
|
64c59476f4 | ||
|
|
2bae05b8ed | ||
|
|
ca163c3d6e | ||
|
|
9a796aa202 | ||
|
|
51ff85bb2d | ||
|
|
d389ff1450 | ||
|
|
4415731da4 | ||
|
|
0fdc83af9d | ||
|
|
71a8a41104 | ||
|
|
da19d7ba9f | ||
|
|
1475abb1cb | ||
|
|
27b2f965dd | ||
|
|
100352d6b4 | ||
|
|
8ee8b2560a | ||
|
|
d4a6482091 | ||
|
|
8639290108 | ||
|
|
e699f5d042 | ||
|
|
e977b3eee5 | ||
|
|
c5be8e2a93 | ||
|
|
bff116dbed | ||
|
|
4df120e40e | ||
|
|
e53420c1d0 | ||
|
|
88ccc8a447 | ||
|
|
a98059967d | ||
|
|
b680c7ae95 | ||
|
|
a677abd5e8 | ||
|
|
8c850b58cb | ||
|
|
a34267f54b | ||
|
|
155482851a | ||
|
|
81386a7a43 | ||
|
|
d8e38c1a1d | ||
|
|
3e37d77780 | ||
|
|
e0783c2922 | ||
|
|
c2d4643f9d | ||
|
|
84456f50f6 | ||
|
|
fb10bb4aea | ||
|
|
366d4ad04a | ||
|
|
0c08d7a19a | ||
|
|
eb71ad9a89 | ||
|
|
ad6120080a | ||
|
|
6813007e28 | ||
|
|
09243d1e5d | ||
|
|
0d297c2537 | ||
|
|
341803d784 | ||
|
|
32a7f82772 | ||
|
|
d21ff38ba1 | ||
|
|
2f7ef5f120 | ||
|
|
751139d8c1 | ||
|
|
03b621f390 | ||
|
|
4443a91c24 | ||
|
|
8fe7d652ab | ||
|
|
ecf9436c6e | ||
|
|
994a089b1f | ||
|
|
55aad328ea | ||
|
|
84cc54007e | ||
|
|
43d6c2f369 | ||
|
|
fe9e39dab8 | ||
|
|
760e4cea3a | ||
|
|
e349e01829 | ||
|
|
04ebe0f2a3 | ||
|
|
fb6dbc848e | ||
|
|
65791e4d12 | ||
|
|
3e8f2f1c27 | ||
|
|
29b83598e3 |
@@ -1,37 +0,0 @@
|
|||||||
[bumpversion]
|
|
||||||
current_version = 1.8.9
|
|
||||||
parse = (?P<major>[\d]+) # major version number
|
|
||||||
\.(?P<minor>[\d]+) # minor version number
|
|
||||||
\.(?P<patch>[\d]+) # patch version number
|
|
||||||
(?P<prerelease> # optional pre-release - ex: a1, b2, rc25
|
|
||||||
(?P<prekind>a|b|rc) # pre-release type
|
|
||||||
(?P<num>[\d]+) # pre-release version number
|
|
||||||
)?
|
|
||||||
( # optional nightly release indicator
|
|
||||||
\.(?P<nightly>dev[0-9]+) # ex: .dev02142023
|
|
||||||
)? # expected matches: `1.15.0`, `1.5.0a11`, `1.5.0a1.dev123`, `1.5.0.dev123457`, expected failures: `1`, `1.5`, `1.5.2-a1`, `text1.5.0`
|
|
||||||
serialize =
|
|
||||||
{major}.{minor}.{patch}{prekind}{num}.{nightly}
|
|
||||||
{major}.{minor}.{patch}.{nightly}
|
|
||||||
{major}.{minor}.{patch}{prekind}{num}
|
|
||||||
{major}.{minor}.{patch}
|
|
||||||
commit = False
|
|
||||||
tag = False
|
|
||||||
|
|
||||||
[bumpversion:part:prekind]
|
|
||||||
first_value = a
|
|
||||||
optional_value = final
|
|
||||||
values =
|
|
||||||
a
|
|
||||||
b
|
|
||||||
rc
|
|
||||||
final
|
|
||||||
|
|
||||||
[bumpversion:part:num]
|
|
||||||
first_value = 1
|
|
||||||
|
|
||||||
[bumpversion:part:nightly]
|
|
||||||
|
|
||||||
[bumpversion:file:core/setup.py]
|
|
||||||
|
|
||||||
[bumpversion:file:core/dbt/version.py]
|
|
||||||
@@ -3,6 +3,9 @@
|
|||||||
For information on prior major and minor releases, see their changelogs:
|
For information on prior major and minor releases, see their changelogs:
|
||||||
|
|
||||||
|
|
||||||
|
* [1.10](https://github.com/dbt-labs/dbt-core/blob/1.10.latest/CHANGELOG.md)
|
||||||
|
* [1.9](https://github.com/dbt-labs/dbt-core/blob/1.9.latest/CHANGELOG.md)
|
||||||
|
* [1.8](https://github.com/dbt-labs/dbt-core/blob/1.8.latest/CHANGELOG.md)
|
||||||
* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
|
* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
|
||||||
* [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
|
* [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
|
||||||
* [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
|
* [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
|
||||||
|
|||||||
@@ -1,276 +0,0 @@
|
|||||||
## dbt-core 1.8.0 - May 09, 2024
|
|
||||||
|
|
||||||
### Breaking Changes
|
|
||||||
|
|
||||||
- Remove adapter.get_compiler interface ([#9148](https://github.com/dbt-labs/dbt-core/issues/9148))
|
|
||||||
- Move AdapterLogger to adapters folder ([#9151](https://github.com/dbt-labs/dbt-core/issues/9151))
|
|
||||||
- Rm --dry-run flag from 'dbt deps --add-package', in favor of just 'dbt deps --lock' ([#9100](https://github.com/dbt-labs/dbt-core/issues/9100))
|
|
||||||
- move event manager setup back to core, remove ref to global EVENT_MANAGER and clean up event manager functions ([#9150](https://github.com/dbt-labs/dbt-core/issues/9150))
|
|
||||||
- Remove dbt-tests-adapter and dbt-postgres packages from dbt-core ([#9455](https://github.com/dbt-labs/dbt-core/issues/9455))
|
|
||||||
- Update the default behaviour of require_explicit_package_overrides_for_builtin_materializations to True. ([#10062](https://github.com/dbt-labs/dbt-core/issues/10062))
|
|
||||||
|
|
||||||
### Features
|
|
||||||
|
|
||||||
- Initial implementation of unit testing ([#8287](https://github.com/dbt-labs/dbt-core/issues/8287))
|
|
||||||
- Unit test manifest artifacts and selection ([#8295](https://github.com/dbt-labs/dbt-core/issues/8295))
|
|
||||||
- Support config with tags & meta for unit tests ([#8294](https://github.com/dbt-labs/dbt-core/issues/8294))
|
|
||||||
- Allow adapters to include package logs in dbt standard logging ([#7859](https://github.com/dbt-labs/dbt-core/issues/7859))
|
|
||||||
- Enable inline csv fixtures in unit tests ([#8626](https://github.com/dbt-labs/dbt-core/issues/8626))
|
|
||||||
- Add drop_schema_named macro ([#8025](https://github.com/dbt-labs/dbt-core/issues/8025))
|
|
||||||
- migrate utils to common and adapters folders ([#8924](https://github.com/dbt-labs/dbt-core/issues/8924))
|
|
||||||
- Move Agate helper client into common ([#8926](https://github.com/dbt-labs/dbt-core/issues/8926))
|
|
||||||
- remove usage of dbt.config.PartialProject from dbt/adapters ([#8928](https://github.com/dbt-labs/dbt-core/issues/8928))
|
|
||||||
- Add exports to SavedQuery spec ([#8892](https://github.com/dbt-labs/dbt-core/issues/8892))
|
|
||||||
- Support unit testing incremental models ([#8422](https://github.com/dbt-labs/dbt-core/issues/8422))
|
|
||||||
- Add support of csv file fixtures to unit testing ([#8290](https://github.com/dbt-labs/dbt-core/issues/8290))
|
|
||||||
- Remove legacy logger ([#8027](https://github.com/dbt-labs/dbt-core/issues/8027))
|
|
||||||
- Unit tests support --defer and state:modified ([#8517](https://github.com/dbt-labs/dbt-core/issues/8517))
|
|
||||||
- Support setting export configs hierarchically via saved query and project configs ([#8956](https://github.com/dbt-labs/dbt-core/issues/8956))
|
|
||||||
- Support source inputs in unit tests ([#8507](https://github.com/dbt-labs/dbt-core/issues/8507))
|
|
||||||
- Use daff to render diff displayed in stdout when unit test fails ([#8558](https://github.com/dbt-labs/dbt-core/issues/8558))
|
|
||||||
- Global config for --target and --profile CLI flags and DBT_TARGET and DBT_PROFILE environment variables. ([#7798](https://github.com/dbt-labs/dbt-core/issues/7798))
|
|
||||||
- Move unit testing to test command ([#8979](https://github.com/dbt-labs/dbt-core/issues/8979))
|
|
||||||
- Support --empty flag for schema-only dry runs ([#8971](https://github.com/dbt-labs/dbt-core/issues/8971))
|
|
||||||
- Support unit tests in non-root packages ([#8285](https://github.com/dbt-labs/dbt-core/issues/8285))
|
|
||||||
- Convert the `tests` config to `data_tests` in both dbt_project.yml and schema files. ([#8699](https://github.com/dbt-labs/dbt-core/issues/8699))
|
|
||||||
- Make fixture files full-fledged parts of the manifest and enable partial parsing ([#9067](https://github.com/dbt-labs/dbt-core/issues/9067))
|
|
||||||
- Adds support for parsing conversion metric related properties for the semantic layer. ([#9203](https://github.com/dbt-labs/dbt-core/issues/9203))
|
|
||||||
- Package selector syntax for the current package ([#6891](https://github.com/dbt-labs/dbt-core/issues/6891))
|
|
||||||
- In build command run unit tests before models ([#9128](https://github.com/dbt-labs/dbt-core/issues/9128))
|
|
||||||
- Move flags from UserConfig in profiles.yml to flags in dbt_project.yml ([#9183](https://github.com/dbt-labs/dbt-core/issues/9183))
|
|
||||||
- Added hook support for `dbt source freshness` ([#5609](https://github.com/dbt-labs/dbt-core/issues/5609))
|
|
||||||
- Align with order of unit test output when `actual` differs from `expected` ([#9370](https://github.com/dbt-labs/dbt-core/issues/9370))
|
|
||||||
- Added support for external nodes in unit test nodes ([#8944](https://github.com/dbt-labs/dbt-core/issues/8944))
|
|
||||||
- Enable unit testing versioned models ([#9344](https://github.com/dbt-labs/dbt-core/issues/9344))
|
|
||||||
- Enable list command for unit tests ([#8508](https://github.com/dbt-labs/dbt-core/issues/8508))
|
|
||||||
- Integration Test Optimizations ([#9498](https://github.com/dbt-labs/dbt-core/issues/9498))
|
|
||||||
- Accelerate integration tests with caching. ([#9498](https://github.com/dbt-labs/dbt-core/issues/9498))
|
|
||||||
- Cache environment variables ([#9489](https://github.com/dbt-labs/dbt-core/issues/9489))
|
|
||||||
- Support meta at the config level for Metric nodes ([#9441](https://github.com/dbt-labs/dbt-core/issues/9441))
|
|
||||||
- Add cache to SavedQuery config ([#9540](https://github.com/dbt-labs/dbt-core/issues/9540))
|
|
||||||
- Support scrubbing secret vars ([#7247](https://github.com/dbt-labs/dbt-core/issues/7247))
|
|
||||||
- Allow excluding resource types for build, list, and clone commands, and provide env vars ([#9237](https://github.com/dbt-labs/dbt-core/issues/9237))
|
|
||||||
- SourceDefinition.meta represents source-level and table-level meta properties, instead of only table-level ([#9766](https://github.com/dbt-labs/dbt-core/issues/9766))
|
|
||||||
- Allow metrics in semantic layer filters. ([#9804](https://github.com/dbt-labs/dbt-core/issues/9804))
|
|
||||||
- Add wildcard support to the group selector method ([#9811](https://github.com/dbt-labs/dbt-core/issues/9811))
|
|
||||||
- source freshness precomputes metadata-based freshness in batch, if possible ([#8705](https://github.com/dbt-labs/dbt-core/issues/8705))
|
|
||||||
- Better error message when trying to select a disabled model ([#9747](https://github.com/dbt-labs/dbt-core/issues/9747))
|
|
||||||
- Support SQL in unit testing fixtures ([#9405](https://github.com/dbt-labs/dbt-core/issues/9405))
|
|
||||||
- Add require_explicit_package_overrides_for_builtin_materializations to dbt_project.yml flags, which can be used to opt-out of overriding built-in materializations from packages ([#10007](https://github.com/dbt-labs/dbt-core/issues/10007))
|
|
||||||
- add --empty flag to dbt build command ([#10026](https://github.com/dbt-labs/dbt-core/issues/10026))
|
|
||||||
- Ability to `silence` warnings via `warn_error_options` ([#9644](https://github.com/dbt-labs/dbt-core/issues/9644))
|
|
||||||
- Allow aliases `error` for `include` and `warn` for `exclude` in `warn_error_options` ([#9644](https://github.com/dbt-labs/dbt-core/issues/9644))
|
|
||||||
- Add unit_test: selection method ([#10053](https://github.com/dbt-labs/dbt-core/issues/10053))
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
|
|
||||||
- For packages installed with tarball method, fetch metadata to resolve nested dependencies ([#8621](https://github.com/dbt-labs/dbt-core/issues/8621))
|
|
||||||
- Fix partial parsing not working for semantic model change ([#8859](https://github.com/dbt-labs/dbt-core/issues/8859))
|
|
||||||
- Handle unknown `type_code` for model contracts ([#8877](https://github.com/dbt-labs/dbt-core/issues/8877), [#8353](https://github.com/dbt-labs/dbt-core/issues/8353))
|
|
||||||
- Rework get_catalog implementation to retain previous adapter interface semantics ([#8846](https://github.com/dbt-labs/dbt-core/issues/8846))
|
|
||||||
- Add back contract enforcement for temporary tables on postgres ([#8857](https://github.com/dbt-labs/dbt-core/issues/8857))
|
|
||||||
- Add version to fqn when version==0 ([#8836](https://github.com/dbt-labs/dbt-core/issues/8836))
|
|
||||||
- Fix cased comparison in catalog-retrieval function. ([#8939](https://github.com/dbt-labs/dbt-core/issues/8939))
|
|
||||||
- Catalog queries now assign the correct type to materialized views ([#8864](https://github.com/dbt-labs/dbt-core/issues/8864))
|
|
||||||
- Fix compilation exception running empty seed file and support new Integer agate data_type ([#8895](https://github.com/dbt-labs/dbt-core/issues/8895))
|
|
||||||
- Make relation filtering None-tolerant for maximal flexibility across adapters. ([#8974](https://github.com/dbt-labs/dbt-core/issues/8974))
|
|
||||||
- Update run_results.json from previous versions of dbt to support deferral and rerun from failure ([#9010](https://github.com/dbt-labs/dbt-core/issues/9010))
|
|
||||||
- Use MANIFEST.in to recursively include all jinja templates; fixes issue where some templates were not included in the distribution ([#9016](https://github.com/dbt-labs/dbt-core/issues/9016))
|
|
||||||
- Fix git repository with subdirectory for Deps ([#9000](https://github.com/dbt-labs/dbt-core/issues/9000))
|
|
||||||
- Use seed file from disk for unit testing if rows not specified in YAML config ([#8652](https://github.com/dbt-labs/dbt-core/issues/8652))
|
|
||||||
- Fix formatting of tarball information in packages-lock.yml ([#9062](https://github.com/dbt-labs/dbt-core/issues/9062))
|
|
||||||
- deps: Lock git packages to commit SHA during resolution ([#9050](https://github.com/dbt-labs/dbt-core/issues/9050))
|
|
||||||
- deps: Use PackageRenderer to read package-lock.json ([#9127](https://github.com/dbt-labs/dbt-core/issues/9127))
|
|
||||||
- Ensure we produce valid jsonschema schemas for manifest, catalog, run-results, and sources ([#8991](https://github.com/dbt-labs/dbt-core/issues/8991))
|
|
||||||
- Get sources working again in dbt docs generate ([#9119](https://github.com/dbt-labs/dbt-core/issues/9119))
|
|
||||||
- Fix parsing f-strings in python models ([#6976](https://github.com/dbt-labs/dbt-core/issues/6976))
|
|
||||||
- Preserve the value of vars and the --full-refresh flags when using retry. ([#9112](https://github.com/dbt-labs/dbt-core/issues/9112))
|
|
||||||
- fix lock-file bad indentation ([#9319](https://github.com/dbt-labs/dbt-core/issues/9319))
|
|
||||||
- fix configuration of turning test warnings into failures with WARN_ERROR_OPTIONS ([#7761](https://github.com/dbt-labs/dbt-core/issues/7761))
|
|
||||||
- Support reasonably long unit test names ([#9015](https://github.com/dbt-labs/dbt-core/issues/9015))
|
|
||||||
- Fix back-compat parsing for model-level 'tests', source table-level 'tests', and 'tests' defined on model versions ([#9411](https://github.com/dbt-labs/dbt-core/issues/9411))
|
|
||||||
- Fix retry command run from CLI ([#9444](https://github.com/dbt-labs/dbt-core/issues/9444))
|
|
||||||
- Fix seed and source selection in `dbt docs generate` ([#9161](https://github.com/dbt-labs/dbt-core/issues/9161))
|
|
||||||
- Add TestGenerateCatalogWithExternalNodes, include empty nodes in node selection during docs generate ([#9456](https://github.com/dbt-labs/dbt-core/issues/9456))
|
|
||||||
- Fix node type plurals in FoundStats log message ([#9464](https://github.com/dbt-labs/dbt-core/issues/9464))
|
|
||||||
- Run manifest upgrade preprocessing on any older manifest version, including v11 ([#9487](https://github.com/dbt-labs/dbt-core/issues/9487))
|
|
||||||
- Update 'compiled_code' context member logic to route based on command ('clone' or not). Reimplement 'sql' context member as wrapper of 'compiled_code'. ([#9502](https://github.com/dbt-labs/dbt-core/issues/9502))
|
|
||||||
- Fix bug where Semantic Layer filter strings are parsed into lists. ([#9507](https://github.com/dbt-labs/dbt-core/issues/9507))
|
|
||||||
- Initialize invocation context before test fixtures are built. ([#9489](https://github.com/dbt-labs/dbt-core/issues/9489))
|
|
||||||
- Fix conflict with newer versions of Snowplow tracker ([#8719](https://github.com/dbt-labs/dbt-core/issues/8719))
|
|
||||||
- When patching versioned models, set constraints after config ([#9364](https://github.com/dbt-labs/dbt-core/issues/9364))
|
|
||||||
- only include unmodified semantic models in state:modified selection ([#9548](https://github.com/dbt-labs/dbt-core/issues/9548))
|
|
||||||
- Set query headers when manifest is passed in to dbtRunner ([#9546](https://github.com/dbt-labs/dbt-core/issues/9546))
|
|
||||||
- Store node_info in node associated logging events ([#9557](https://github.com/dbt-labs/dbt-core/issues/9557))
|
|
||||||
- Fix Semantic Model Compare node relations ([#9548](https://github.com/dbt-labs/dbt-core/issues/9548))
|
|
||||||
- Tighten exception handling to avoid worker thread hangs. ([#9583](https://github.com/dbt-labs/dbt-core/issues/9583))
|
|
||||||
- Clearer no-op logging in stubbed SavedQueryRunner ([#9533](https://github.com/dbt-labs/dbt-core/issues/9533))
|
|
||||||
- Fix node_info contextvar handling so incorrect node_info doesn't persist ([#8866](https://github.com/dbt-labs/dbt-core/issues/8866))
|
|
||||||
- Add target-path to retry ([#8948](https://github.com/dbt-labs/dbt-core/issues/8948))
|
|
||||||
- Do not add duplicate input_measures ([#9360](https://github.com/dbt-labs/dbt-core/issues/9360))
|
|
||||||
- Throw a ParsingError if a primary key constraint is defined on multiple columns or at both the column and model level. ([#9581](https://github.com/dbt-labs/dbt-core/issues/9581))
|
|
||||||
- Bug fix: don't parse Jinja in filters for input metrics or measures. ([#9582](https://github.com/dbt-labs/dbt-core/issues/9582))
|
|
||||||
- Fix traceback parsing for exceptions raised due to csv fixtures moved into or out of fixture/subfolders. ([#9570](https://github.com/dbt-labs/dbt-core/issues/9570))
|
|
||||||
- Fix partial parsing `KeyError` on deleted schema files ([#8860](https://github.com/dbt-labs/dbt-core/issues/8860))
|
|
||||||
- Support saved queries in `dbt list` ([#9532](https://github.com/dbt-labs/dbt-core/issues/9532))
|
|
||||||
- include sources in catalog.json when over 100 relations selected for catalog generation ([#9755](https://github.com/dbt-labs/dbt-core/issues/9755))
|
|
||||||
- Support overriding macros in packages in unit testing ([#9624](https://github.com/dbt-labs/dbt-core/issues/9624))
|
|
||||||
- Handle exceptions for failing on-run-* hooks in source freshness ([#9511](https://github.com/dbt-labs/dbt-core/issues/9511))
|
|
||||||
- Validation of unit test parsing for incremental models ([#9593](https://github.com/dbt-labs/dbt-core/issues/9593))
|
|
||||||
- Fix use of retry command on command using defer ([#9770](https://github.com/dbt-labs/dbt-core/issues/9770))
|
|
||||||
- Make `args` variable to be un-modified by `dbt.invoke(args)` ([#8938](https://github.com/dbt-labs/dbt-core/issues/8938), [#9787](https://github.com/dbt-labs/dbt-core/issues/9787))
|
|
||||||
- Only create the packages-install-path / dbt_packages folder during dbt deps ([#6985](https://github.com/dbt-labs/dbt-core/issues/6985), [#9584](https://github.com/dbt-labs/dbt-core/issues/9584))
|
|
||||||
- Unit test path outputs ([#9608](https://github.com/dbt-labs/dbt-core/issues/9608))
|
|
||||||
- Fix assorted source freshness edgecases so check is run or actionable information is given ([#9078](https://github.com/dbt-labs/dbt-core/issues/9078))
|
|
||||||
- Fix Docker release process to account for both historical and current versions of `dbt-postgres` ([#9827](https://github.com/dbt-labs/dbt-core/issues/9827))
|
|
||||||
- Exclude password-like fields for considering reparse ([#9795](https://github.com/dbt-labs/dbt-core/issues/9795))
|
|
||||||
- Fixed query comments test ([#9860](https://github.com/dbt-labs/dbt-core/issues/9860))
|
|
||||||
- Begin warning people about spaces in model names ([#9397](https://github.com/dbt-labs/dbt-core/issues/9397))
|
|
||||||
- Add NodeRelation to SavedQuery Export ([#9534](https://github.com/dbt-labs/dbt-core/issues/9534))
|
|
||||||
- Disambiguate FreshnessConfigProblem error message ([#9891](https://github.com/dbt-labs/dbt-core/issues/9891))
|
|
||||||
- Use consistent secret scrubbing with the log function. ([#9987](https://github.com/dbt-labs/dbt-core/issues/9987))
|
|
||||||
- Validate against empty strings in package definitions ([#9985](https://github.com/dbt-labs/dbt-core/issues/9985))
|
|
||||||
- Fix default value for indirect selection in selector cannot overwritten by CLI flag and env var ([#9976](https://github.com/dbt-labs/dbt-core/issues/9976), [#7673](https://github.com/dbt-labs/dbt-core/issues/7673))
|
|
||||||
- Simplify error message if test severity isn't 'warn' or 'error' ([#9715](https://github.com/dbt-labs/dbt-core/issues/9715))
|
|
||||||
- Support overriding source level loaded_at_field with a null table level definition ([#9320](https://github.com/dbt-labs/dbt-core/issues/9320))
|
|
||||||
- Undo conditional agate import to prevent UnresolvedTypeReferenceError during RunResult serialization ([#10098](https://github.com/dbt-labs/dbt-core/issues/10098))
|
|
||||||
- Restore previous behavior for --favor-state: only favor defer_relation if not selected in current command ([#10107](https://github.com/dbt-labs/dbt-core/issues/10107))
|
|
||||||
- Unit test fixture (csv) returns null for empty value ([#9881](https://github.com/dbt-labs/dbt-core/issues/9881))
|
|
||||||
|
|
||||||
### Docs
|
|
||||||
|
|
||||||
- Add analytics for dbt.com ([dbt-docs/#430](https://github.com/dbt-labs/dbt-docs/issues/430))
|
|
||||||
- fix get_custom_database docstring ([dbt-docs/#9003](https://github.com/dbt-labs/dbt-docs/issues/9003))
|
|
||||||
- Enable display of unit tests ([dbt-docs/#501](https://github.com/dbt-labs/dbt-docs/issues/501))
|
|
||||||
- Unit tests not rendering ([dbt-docs/#506](https://github.com/dbt-labs/dbt-docs/issues/506))
|
|
||||||
|
|
||||||
### Under the Hood
|
|
||||||
|
|
||||||
- Added more type annotations. ([#8537](https://github.com/dbt-labs/dbt-core/issues/8537))
|
|
||||||
- Add unit testing functional tests ([#8512](https://github.com/dbt-labs/dbt-core/issues/8512))
|
|
||||||
- Remove usage of dbt.include.global_project in dbt/adapters ([#8925](https://github.com/dbt-labs/dbt-core/issues/8925))
|
|
||||||
- Add a no-op runner for Saved Query ([#8893](https://github.com/dbt-labs/dbt-core/issues/8893))
|
|
||||||
- remove dbt.flags.MP_CONTEXT usage in dbt/adapters ([#8967](https://github.com/dbt-labs/dbt-core/issues/8967))
|
|
||||||
- Remove usage of dbt.flags.LOG_CACHE_EVENTS in dbt/adapters ([#8969](https://github.com/dbt-labs/dbt-core/issues/8969))
|
|
||||||
- Move CatalogRelationTypes test case to the shared test suite to be reused by adapter maintainers ([#8952](https://github.com/dbt-labs/dbt-core/issues/8952))
|
|
||||||
- Treat SystemExit as an interrupt if raised during node execution. ([#n/a](https://github.com/dbt-labs/dbt-core/issues/n/a))
|
|
||||||
- Removing unused 'documentable' ([#8871](https://github.com/dbt-labs/dbt-core/issues/8871))
|
|
||||||
- Remove use of dbt/core exceptions in dbt/adapter ([#8920](https://github.com/dbt-labs/dbt-core/issues/8920))
|
|
||||||
- Cache dbt plugin modules to improve integration test performance ([#9029](https://github.com/dbt-labs/dbt-core/issues/9029))
|
|
||||||
- Consolidate deferral methods & flags ([#7965](https://github.com/dbt-labs/dbt-core/issues/7965), [#8715](https://github.com/dbt-labs/dbt-core/issues/8715))
|
|
||||||
- Fix test_current_timestamp_matches_utc test; allow for MacOS runner system clock variance ([#9057](https://github.com/dbt-labs/dbt-core/issues/9057))
|
|
||||||
- Remove usage of dbt.deprecations in dbt/adapters, enable core & adapter-specific event types and protos ([#8927](https://github.com/dbt-labs/dbt-core/issues/8927), [#8918](https://github.com/dbt-labs/dbt-core/issues/8918))
|
|
||||||
- Clean up unused adapter folders ([#9123](https://github.com/dbt-labs/dbt-core/issues/9123))
|
|
||||||
- Move column constraints into common/contracts, removing another dependency of adapters on core. ([#9024](https://github.com/dbt-labs/dbt-core/issues/9024))
|
|
||||||
- Move dbt.semver to dbt.common.semver and update references. ([#9039](https://github.com/dbt-labs/dbt-core/issues/9039))
|
|
||||||
- Move lowercase utils method to common ([#9180](https://github.com/dbt-labs/dbt-core/issues/9180))
|
|
||||||
- Remove usages of dbt.clients.jinja in dbt/adapters ([#9205](https://github.com/dbt-labs/dbt-core/issues/9205))
|
|
||||||
- Remove usage of dbt.contracts in dbt/adapters ([#9208](https://github.com/dbt-labs/dbt-core/issues/9208))
|
|
||||||
- Remove usage of dbt.contracts.graph.nodes.ResultNode in dbt/adapters ([#9214](https://github.com/dbt-labs/dbt-core/issues/9214))
|
|
||||||
- Introduce RelationConfig Protocol, consolidate Relation.create_from ([#9215](https://github.com/dbt-labs/dbt-core/issues/9215))
|
|
||||||
- remove manifest from adapter.set_relations_cache signature ([#9217](https://github.com/dbt-labs/dbt-core/issues/9217))
|
|
||||||
- remove manifest from adapter catalog method signatures ([#9218](https://github.com/dbt-labs/dbt-core/issues/9218))
|
|
||||||
- Move BaseConfig, Metadata and various other contract classes from model_config to common/contracts/config ([#8919](https://github.com/dbt-labs/dbt-core/issues/8919))
|
|
||||||
- Add MacroResolverProtocol, remove lazy loading of manifest in adapter.execute_macro ([#9244](https://github.com/dbt-labs/dbt-core/issues/9244))
|
|
||||||
- pass query header context to MacroQueryStringSetter ([#9249](https://github.com/dbt-labs/dbt-core/issues/9249), [#9250](https://github.com/dbt-labs/dbt-core/issues/9250))
|
|
||||||
- add macro_context_generator on adapter ([#9247](https://github.com/dbt-labs/dbt-core/issues/9247))
|
|
||||||
- pass mp_context to adapter factory as argument instead of import ([#9025](https://github.com/dbt-labs/dbt-core/issues/9025))
|
|
||||||
- have dbt-postgres use RelationConfig protocol for materialized views ([#9292](https://github.com/dbt-labs/dbt-core/issues/9292))
|
|
||||||
- move system.py to common as dbt-bigquery relies on it to call gcloud ([#9293](https://github.com/dbt-labs/dbt-core/issues/9293))
|
|
||||||
- Reorganizing event definitions to define core events in dbt/events rather than dbt/common ([#9152](https://github.com/dbt-labs/dbt-core/issues/9152))
|
|
||||||
- move exceptions used only in dbt/common to dbt/common/exceptions ([#9332](https://github.com/dbt-labs/dbt-core/issues/9332))
|
|
||||||
- Remove usage of dbt.adapters.factory in dbt/common ([#9334](https://github.com/dbt-labs/dbt-core/issues/9334))
|
|
||||||
- Accept valid_error_names in WarnErrorOptions constructor, remove global usage of event modules ([#9337](https://github.com/dbt-labs/dbt-core/issues/9337))
|
|
||||||
- Move result objects to dbt.artifacts ([#9193](https://github.com/dbt-labs/dbt-core/issues/9193))
|
|
||||||
- dbt Labs OSS standardization of docs and templates. ([#9252](https://github.com/dbt-labs/dbt-core/issues/9252))
|
|
||||||
- Add dbt-common as a dependency and remove dbt/common ([#9357](https://github.com/dbt-labs/dbt-core/issues/9357))
|
|
||||||
- move cache exceptions to dbt/adapters ([#9362](https://github.com/dbt-labs/dbt-core/issues/9362))
|
|
||||||
- Clean up macro contexts. ([#9422](https://github.com/dbt-labs/dbt-core/issues/9422))
|
|
||||||
- Add the @requires.manifest decorator to the retry command. ([#9426](https://github.com/dbt-labs/dbt-core/issues/9426))
|
|
||||||
- Move WritableManifest + Documentation to dbt/artifacts ([#9378](https://github.com/dbt-labs/dbt-core/issues/9378), [#9379](https://github.com/dbt-labs/dbt-core/issues/9379))
|
|
||||||
- Define Macro and Group resources in dbt/artifacts ([#9381](https://github.com/dbt-labs/dbt-core/issues/9381), [#9382](https://github.com/dbt-labs/dbt-core/issues/9382))
|
|
||||||
- Move `SavedQuery` data definition to `dbt/artifacts` ([#9386](https://github.com/dbt-labs/dbt-core/issues/9386))
|
|
||||||
- Migrate data parts of `Metric` node to dbt/artifacts ([#9383](https://github.com/dbt-labs/dbt-core/issues/9383))
|
|
||||||
- Move data portion of `SemanticModel` to dbt/artifacts ([#9387](https://github.com/dbt-labs/dbt-core/issues/9387))
|
|
||||||
- Move data parts of `Exposure` class to dbt/artifacts ([#9380](https://github.com/dbt-labs/dbt-core/issues/9380))
|
|
||||||
- Split up deferral across parsing (adding 'defer_relation' from state manifest) and runtime ref resolution ([#9199](https://github.com/dbt-labs/dbt-core/issues/9199))
|
|
||||||
- Start using `Mergeable` from dbt-common ([#9505](https://github.com/dbt-labs/dbt-core/issues/9505))
|
|
||||||
- Move manifest nodes to artifacts ([#9388](https://github.com/dbt-labs/dbt-core/issues/9388))
|
|
||||||
- Move data parts of `SourceDefinition` class to dbt/artifacts ([#9384](https://github.com/dbt-labs/dbt-core/issues/9384))
|
|
||||||
- Remove uses of Replaceable class ([#7802](https://github.com/dbt-labs/dbt-core/issues/7802))
|
|
||||||
- Make dbt-core compatible with Python 3.12 ([#9007](https://github.com/dbt-labs/dbt-core/issues/9007))
|
|
||||||
- Restrict protobuf to major version 4. ([#9566](https://github.com/dbt-labs/dbt-core/issues/9566))
|
|
||||||
- Remove references to dbt.tracking and dbt.flags from dbt/artifacts ([#9390](https://github.com/dbt-labs/dbt-core/issues/9390))
|
|
||||||
- Remove unused key `wildcard` from MethodName enum ([#9641](https://github.com/dbt-labs/dbt-core/issues/9641))
|
|
||||||
- Implement primary key inference for model nodes ([#9652](https://github.com/dbt-labs/dbt-core/issues/9652))
|
|
||||||
- Define UnitTestDefinition resource in dbt/artifacts/resources ([#9667](https://github.com/dbt-labs/dbt-core/issues/9667))
|
|
||||||
- Use Manifest instead of WritableManifest in PreviousState and _get_deferred_manifest ([#9567](https://github.com/dbt-labs/dbt-core/issues/9567))
|
|
||||||
- Improve dbt CLI speed ([#4627](https://github.com/dbt-labs/dbt-core/issues/4627))
|
|
||||||
- Include node_info in various Result events ([#9619](https://github.com/dbt-labs/dbt-core/issues/9619))
|
|
||||||
- Remove non dbt.artifacts dbt.* imports from dbt/artifacts ([#9926](https://github.com/dbt-labs/dbt-core/issues/9926))
|
|
||||||
- Migrate to using `error_tag` provided by `dbt-common` ([#9914](https://github.com/dbt-labs/dbt-core/issues/9914))
|
|
||||||
- Add a test for semantic manifest and move test fixtures needed for it ([#9665](https://github.com/dbt-labs/dbt-core/issues/9665))
|
|
||||||
- Raise deprecation warning if installed package overrides built-in materialization ([#9971](https://github.com/dbt-labs/dbt-core/issues/9971))
|
|
||||||
- Use the SECRET_ENV_PREFIX from dbt_common instead of duplicating it in dbt-core ([#10018](https://github.com/dbt-labs/dbt-core/issues/10018))
|
|
||||||
- Enable use of record mode via environment variable ([#10045](https://github.com/dbt-labs/dbt-core/issues/10045))
|
|
||||||
- Consistent naming + deprecation warnings for "legacy behavior" flags ([#10062](https://github.com/dbt-labs/dbt-core/issues/10062))
|
|
||||||
- Enable use of context in serialization ([#10093](https://github.com/dbt-labs/dbt-core/issues/10093))
|
|
||||||
|
|
||||||
### Dependencies
|
|
||||||
|
|
||||||
- Bump actions/checkout from 3 to 4 ([#8781](https://github.com/dbt-labs/dbt-core/issues/8781))
|
|
||||||
- Begin using DSI 0.4.x ([#8892](https://github.com/dbt-labs/dbt-core/issues/8892))
|
|
||||||
- Update typing-extensions version to >=4.4 ([#9012](https://github.com/dbt-labs/dbt-core/issues/9012))
|
|
||||||
- Bump ddtrace from 2.1.7 to 2.3.0 ([#9132](https://github.com/dbt-labs/dbt-core/issues/9132))
|
|
||||||
- Bump freezegun from 0.3.12 to 1.3.0 ([#9197](https://github.com/dbt-labs/dbt-core/issues/9197))
|
|
||||||
- Bump actions/setup-python from 4 to 5 ([#9267](https://github.com/dbt-labs/dbt-core/issues/9267))
|
|
||||||
- Bump actions/download-artifact from 3 to 4 ([#9374](https://github.com/dbt-labs/dbt-core/issues/9374))
|
|
||||||
- Relax pathspec upper bound version restriction ([#9373](https://github.com/dbt-labs/dbt-core/issues/9373))
|
|
||||||
- remove dbt/adapters and add dependency on dbt-adapters ([#9430](https://github.com/dbt-labs/dbt-core/issues/9430))
|
|
||||||
- Bump actions/upload-artifact from 3 to 4 ([#9470](https://github.com/dbt-labs/dbt-core/issues/9470))
|
|
||||||
- Bump actions/cache from 3 to 4 ([#9471](https://github.com/dbt-labs/dbt-core/issues/9471))
|
|
||||||
- Bump peter-evans/create-pull-request from 5 to 6 ([#9552](https://github.com/dbt-labs/dbt-core/issues/9552))
|
|
||||||
- Restrict protobuf to 4.* versions ([#9566](https://github.com/dbt-labs/dbt-core/issues/9566))
|
|
||||||
- Bump codecov/codecov-action from 3 to 4 ([#9659](https://github.com/dbt-labs/dbt-core/issues/9659))
|
|
||||||
- Cap dbt-semantic-interfaces version range to <0.6 ([#9671](https://github.com/dbt-labs/dbt-core/issues/9671))
|
|
||||||
- Bump python from 3.10.7-slim-bullseye to 3.11.2-slim-bullseye in /docker ([#9687](https://github.com/dbt-labs/dbt-core/issues/9687))
|
|
||||||
- bump dbt-common to accept major version 1 ([#9690](https://github.com/dbt-labs/dbt-core/issues/9690))
|
|
||||||
- Remove duplicate dependency of protobuf in dev-requirements ([#9830](https://github.com/dbt-labs/dbt-core/issues/9830))
|
|
||||||
- Bump black from 23.3.0 to >=24.3.0,<25.0 ([#8074](https://github.com/dbt-labs/dbt-core/issues/8074))
|
|
||||||
- Update the agate pin to "agate>=1.7.0,<1.10" ([#9934](https://github.com/dbt-labs/dbt-core/issues/9934))
|
|
||||||
|
|
||||||
### Security
|
|
||||||
|
|
||||||
- Update Jinja2 to >= 3.1.3 to address CVE-2024-22195 ([#9638](https://github.com/dbt-labs/dbt-core/issues/9638))
|
|
||||||
- Bump sqlparse to >=0.5.0, <0.6.0 to address GHSA-2m57-hf25-phgg ([#9951](https://github.com/dbt-labs/dbt-core/issues/9951))
|
|
||||||
|
|
||||||
### Contributors
|
|
||||||
- [@LeoTheGriff](https://github.com/LeoTheGriff) ([#9003](https://github.com/dbt-labs/dbt-core/issues/9003))
|
|
||||||
- [@SamuelBFavarin](https://github.com/SamuelBFavarin) ([#9747](https://github.com/dbt-labs/dbt-core/issues/9747))
|
|
||||||
- [@WilliamDee](https://github.com/WilliamDee) ([#9203](https://github.com/dbt-labs/dbt-core/issues/9203))
|
|
||||||
- [@adamlopez](https://github.com/adamlopez) ([#8621](https://github.com/dbt-labs/dbt-core/issues/8621))
|
|
||||||
- [@akurdyukov](https://github.com/akurdyukov) ([#8719](https://github.com/dbt-labs/dbt-core/issues/8719))
|
|
||||||
- [@aliceliu](https://github.com/aliceliu) ([#9652](https://github.com/dbt-labs/dbt-core/issues/9652))
|
|
||||||
- [@asweet](https://github.com/asweet) ([#9641](https://github.com/dbt-labs/dbt-core/issues/9641))
|
|
||||||
- [@b-per](https://github.com/b-per) ([#430](https://github.com/dbt-labs/dbt-core/issues/430))
|
|
||||||
- [@barton996](https://github.com/barton996) ([#7798](https://github.com/dbt-labs/dbt-core/issues/7798), [#6891](https://github.com/dbt-labs/dbt-core/issues/6891))
|
|
||||||
- [@benmosher](https://github.com/benmosher) ([#n/a](https://github.com/dbt-labs/dbt-core/issues/n/a))
|
|
||||||
- [@colin-rorgers-dbt](https://github.com/colin-rorgers-dbt) ([#8919](https://github.com/dbt-labs/dbt-core/issues/8919))
|
|
||||||
- [@courtneyholcomb](https://github.com/courtneyholcomb) ([#9804](https://github.com/dbt-labs/dbt-core/issues/9804), [#9507](https://github.com/dbt-labs/dbt-core/issues/9507), [#9582](https://github.com/dbt-labs/dbt-core/issues/9582))
|
|
||||||
- [@damian3031](https://github.com/damian3031) ([#9860](https://github.com/dbt-labs/dbt-core/issues/9860))
|
|
||||||
- [@dwreeves](https://github.com/dwreeves) ([#4627](https://github.com/dbt-labs/dbt-core/issues/4627))
|
|
||||||
- [@edgarrmondragon](https://github.com/edgarrmondragon) ([#8719](https://github.com/dbt-labs/dbt-core/issues/8719))
|
|
||||||
- [@emmoop](https://github.com/emmoop) ([#9951](https://github.com/dbt-labs/dbt-core/issues/9951))
|
|
||||||
- [@heysweet](https://github.com/heysweet) ([#9811](https://github.com/dbt-labs/dbt-core/issues/9811))
|
|
||||||
- [@jx2lee](https://github.com/jx2lee) ([#9319](https://github.com/dbt-labs/dbt-core/issues/9319), [#7761](https://github.com/dbt-labs/dbt-core/issues/7761))
|
|
||||||
- [@l1xnan](https://github.com/l1xnan) ([#9007](https://github.com/dbt-labs/dbt-core/issues/9007))
|
|
||||||
- [@mederka](https://github.com/mederka) ([#6976](https://github.com/dbt-labs/dbt-core/issues/6976))
|
|
||||||
- [@mjkanji](https://github.com/mjkanji) ([#9934](https://github.com/dbt-labs/dbt-core/issues/9934))
|
|
||||||
- [@nielspardon](https://github.com/nielspardon) ([#7247](https://github.com/dbt-labs/dbt-core/issues/7247))
|
|
||||||
- [@niteshy](https://github.com/niteshy) ([#9830](https://github.com/dbt-labs/dbt-core/issues/9830))
|
|
||||||
- [@ofek1weiss](https://github.com/ofek1weiss) ([#5609](https://github.com/dbt-labs/dbt-core/issues/5609))
|
|
||||||
- [@peterallenwebb](https://github.com/peterallenwebb) ([#9112](https://github.com/dbt-labs/dbt-core/issues/9112))
|
|
||||||
- [@rzjfr](https://github.com/rzjfr) ([#9373](https://github.com/dbt-labs/dbt-core/issues/9373))
|
|
||||||
- [@slothkong](https://github.com/slothkong) ([#9570](https://github.com/dbt-labs/dbt-core/issues/9570))
|
|
||||||
- [@tlento](https://github.com/tlento) ([#9012](https://github.com/dbt-labs/dbt-core/issues/9012), [#9671](https://github.com/dbt-labs/dbt-core/issues/9671))
|
|
||||||
- [@tonayya](https://github.com/tonayya) ([#9252](https://github.com/dbt-labs/dbt-core/issues/9252))
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
## dbt-core 1.8.1 - May 22, 2024
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
|
|
||||||
- Add resource type to saved_query ([#10168](https://github.com/dbt-labs/dbt-core/issues/10168))
|
|
||||||
|
|
||||||
### Docs
|
|
||||||
|
|
||||||
- Enable display of unit tests ([dbt-docs/#501](https://github.com/dbt-labs/dbt-docs/issues/501))
|
|
||||||
- Unit tests not rendering ([dbt-docs/#506](https://github.com/dbt-labs/dbt-docs/issues/506))
|
|
||||||
- Add support for Saved Query node ([dbt-docs/#486](https://github.com/dbt-labs/dbt-docs/issues/486))
|
|
||||||
|
|
||||||
### Security
|
|
||||||
|
|
||||||
- Explicitly bind to localhost in docs serve ([#10209](https://github.com/dbt-labs/dbt-core/issues/10209))
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
## dbt-core 1.8.2 - June 05, 2024
|
|
||||||
|
|
||||||
### Features
|
|
||||||
|
|
||||||
- Add --host flag to dbt docs serve, defaulting to '127.0.0.1' ([#10229](https://github.com/dbt-labs/dbt-core/issues/10229))
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
|
|
||||||
- Fix: Order-insensitive unit test equality assertion for expected/actual with multiple nulls ([#10167](https://github.com/dbt-labs/dbt-core/issues/10167))
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
## dbt-core 1.8.3 - June 20, 2024
|
|
||||||
|
|
||||||
### Features
|
|
||||||
|
|
||||||
- add --empty value to jinja context as flags.EMPTY ([#10317](https://github.com/dbt-labs/dbt-core/issues/10317))
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
|
|
||||||
- Don't warn on `unit_test` config paths that are properly used ([#10311](https://github.com/dbt-labs/dbt-core/issues/10311))
|
|
||||||
|
|
||||||
### Docs
|
|
||||||
|
|
||||||
- Fix npm security vulnerabilities as of June 2024 ([dbt-docs/#513](https://github.com/dbt-labs/dbt-docs/issues/513))
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
## dbt-core 1.8.4 - July 18, 2024
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
|
|
||||||
- Fix setting `silence` of `warn_error_options` via `dbt_project.yaml` flags ([#10160](https://github.com/dbt-labs/dbt-core/issues/10160))
|
|
||||||
- Limit data_tests deprecation to root_project ([#9835](https://github.com/dbt-labs/dbt-core/issues/9835))
|
|
||||||
- CLI flags should take precedence over env var flags ([#10304](https://github.com/dbt-labs/dbt-core/issues/10304))
|
|
||||||
- Fix error constructing warn_error_options ([#10452](https://github.com/dbt-labs/dbt-core/issues/10452))
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
## dbt-core 1.8.5 - August 07, 2024
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
|
|
||||||
- respect --quiet and --warn-error-options for flag deprecations ([#10105](https://github.com/dbt-labs/dbt-core/issues/10105))
|
|
||||||
@@ -1,12 +0,0 @@
|
|||||||
## dbt-core 1.8.6 - August 29, 2024
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
|
|
||||||
- Late render pre- and post-hooks configs in properties / schema YAML files ([#10603](https://github.com/dbt-labs/dbt-core/issues/10603))
|
|
||||||
|
|
||||||
### Under the Hood
|
|
||||||
|
|
||||||
- Improve speed of tree traversal when finding children, increasing build speed for some selectors ([#10434](https://github.com/dbt-labs/dbt-core/issues/10434))
|
|
||||||
|
|
||||||
### Contributors
|
|
||||||
- [@ttusing](https://github.com/ttusing) ([#10434](https://github.com/dbt-labs/dbt-core/issues/10434))
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
## dbt-core 1.8.7 - September 24, 2024
|
|
||||||
|
|
||||||
### Features
|
|
||||||
|
|
||||||
- Add support for behavior flags ([#10618](https://github.com/dbt-labs/dbt-core/issues/10618))
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
## dbt-core 1.8.8 - October 23, 2024
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
|
|
||||||
- Fix unit tests for incremental model with alias ([#10754](https://github.com/dbt-labs/dbt-core/issues/10754))
|
|
||||||
|
|
||||||
### Under the Hood
|
|
||||||
|
|
||||||
- Remove support and testing for Python 3.8, which is now EOL. ([#10861](https://github.com/dbt-labs/dbt-core/issues/10861))
|
|
||||||
|
|
||||||
### Dependencies
|
|
||||||
|
|
||||||
- Pin dbt-common and dbt-adapters with upper bound. ([#10895](https://github.com/dbt-labs/dbt-core/issues/10895))
|
|
||||||
|
|
||||||
### Contributors
|
|
||||||
- [@katsugeneration](https://github.com/katsugeneration) ([#10754](https://github.com/dbt-labs/dbt-core/issues/10754))
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
## dbt-core 1.8.9 - November 21, 2024
|
|
||||||
|
|
||||||
### Under the Hood
|
|
||||||
|
|
||||||
- Pin dbt-common and dbt-adapters with upper bound of 2.0. ([#11024](https://github.com/dbt-labs/dbt-core/issues/11024))
|
|
||||||
6
.changes/unreleased/Dependencies-20251118-155354.yaml
Normal file
6
.changes/unreleased/Dependencies-20251118-155354.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Dependencies
|
||||||
|
body: Use EventCatcher from dbt-common instead of maintaining a local copy
|
||||||
|
time: 2025-11-18T15:53:54.284561+05:30
|
||||||
|
custom:
|
||||||
|
Author: 3loka
|
||||||
|
Issue: "12124"
|
||||||
6
.changes/unreleased/Features-20251006-140352.yaml
Normal file
6
.changes/unreleased/Features-20251006-140352.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Features
|
||||||
|
body: Support partial parsing for function nodes
|
||||||
|
time: 2025-10-06T14:03:52.258104-05:00
|
||||||
|
custom:
|
||||||
|
Author: QMalcolm
|
||||||
|
Issue: "12072"
|
||||||
6
.changes/unreleased/Features-20251117-141053.yaml
Normal file
6
.changes/unreleased/Features-20251117-141053.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Features
|
||||||
|
body: Allow for defining function arguments with default values
|
||||||
|
time: 2025-11-17T14:10:53.860178-06:00
|
||||||
|
custom:
|
||||||
|
Author: QMalcolm
|
||||||
|
Issue: "12044"
|
||||||
6
.changes/unreleased/Features-20251201-165209.yaml
Normal file
6
.changes/unreleased/Features-20251201-165209.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Features
|
||||||
|
body: Raise jsonschema-based deprecation warnings by default
|
||||||
|
time: 2025-12-01T16:52:09.354436-05:00
|
||||||
|
custom:
|
||||||
|
Author: michelleark
|
||||||
|
Issue: 12240
|
||||||
6
.changes/unreleased/Features-20251203-122926.yaml
Normal file
6
.changes/unreleased/Features-20251203-122926.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Features
|
||||||
|
body: ':bug: :snowman: Disable unit tests whose model is disabled'
|
||||||
|
time: 2025-12-03T12:29:26.209248-05:00
|
||||||
|
custom:
|
||||||
|
Author: michelleark
|
||||||
|
Issue: "10540"
|
||||||
6
.changes/unreleased/Features-20251210-202001.yaml
Normal file
6
.changes/unreleased/Features-20251210-202001.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Features
|
||||||
|
body: Implement config.meta_get and config.meta_require
|
||||||
|
time: 2025-12-10T20:20:01.354288-05:00
|
||||||
|
custom:
|
||||||
|
Author: gshank
|
||||||
|
Issue: "12012"
|
||||||
6
.changes/unreleased/Fixes-20250922-151726.yaml
Normal file
6
.changes/unreleased/Fixes-20250922-151726.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Address Click 8.2+ deprecation warning
|
||||||
|
time: 2025-09-22T15:17:26.983151-06:00
|
||||||
|
custom:
|
||||||
|
Author: edgarrmondragon
|
||||||
|
Issue: "12038"
|
||||||
6
.changes/unreleased/Fixes-20251117-140649.yaml
Normal file
6
.changes/unreleased/Fixes-20251117-140649.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Include macros in unit test parsing
|
||||||
|
time: 2025-11-17T14:06:49.518566-05:00
|
||||||
|
custom:
|
||||||
|
Author: michelleark nathanskone
|
||||||
|
Issue: "10157"
|
||||||
6
.changes/unreleased/Fixes-20251117-185025.yaml
Normal file
6
.changes/unreleased/Fixes-20251117-185025.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Allow dbt deps to run when vars lack defaults in dbt_project.yml
|
||||||
|
time: 2025-11-17T18:50:25.759091+05:30
|
||||||
|
custom:
|
||||||
|
Author: 3loka
|
||||||
|
Issue: "8913"
|
||||||
6
.changes/unreleased/Fixes-20251118-171106.yaml
Normal file
6
.changes/unreleased/Fixes-20251118-171106.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Restore DuplicateResourceNameError for intra-project node name duplication, behind behavior flag `require_unique_project_resource_names`
|
||||||
|
time: 2025-11-18T17:11:06.454784-05:00
|
||||||
|
custom:
|
||||||
|
Author: michelleark
|
||||||
|
Issue: "12152"
|
||||||
6
.changes/unreleased/Fixes-20251119-195034.yaml
Normal file
6
.changes/unreleased/Fixes-20251119-195034.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Allow the usage of `function` with `--exclude-resource-type` flag
|
||||||
|
time: 2025-11-19T19:50:34.703236-06:00
|
||||||
|
custom:
|
||||||
|
Author: QMalcolm
|
||||||
|
Issue: "12143"
|
||||||
6
.changes/unreleased/Fixes-20251124-155629.yaml
Normal file
6
.changes/unreleased/Fixes-20251124-155629.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Fix bug where schemas of functions weren't guaranteed to exist
|
||||||
|
time: 2025-11-24T15:56:29.467004-06:00
|
||||||
|
custom:
|
||||||
|
Author: QMalcolm
|
||||||
|
Issue: "12142"
|
||||||
6
.changes/unreleased/Fixes-20251124-155756.yaml
Normal file
6
.changes/unreleased/Fixes-20251124-155756.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Fix generation of deprecations summary
|
||||||
|
time: 2025-11-24T15:57:56.544123-08:00
|
||||||
|
custom:
|
||||||
|
Author: asiunov
|
||||||
|
Issue: "12146"
|
||||||
6
.changes/unreleased/Fixes-20251124-170855.yaml
Normal file
6
.changes/unreleased/Fixes-20251124-170855.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: ':bug: :snowman: Correctly reference foreign key references when --defer and --state provided'
|
||||||
|
time: 2025-11-24T17:08:55.387946-05:00
|
||||||
|
custom:
|
||||||
|
Author: michelleark
|
||||||
|
Issue: "11885"
|
||||||
7
.changes/unreleased/Fixes-20251125-120246.yaml
Normal file
7
.changes/unreleased/Fixes-20251125-120246.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: ':bug: :snowman: Add exception when using --state and referring to a removed
|
||||||
|
test'
|
||||||
|
time: 2025-11-25T12:02:46.635026-05:00
|
||||||
|
custom:
|
||||||
|
Author: emmyoop
|
||||||
|
Issue: "10630"
|
||||||
6
.changes/unreleased/Fixes-20251125-122020.yaml
Normal file
6
.changes/unreleased/Fixes-20251125-122020.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: ':bug: :snowman: Stop emitting `NoNodesForSelectionCriteria` three times during `build` command'
|
||||||
|
time: 2025-11-25T12:20:20.132379-06:00
|
||||||
|
custom:
|
||||||
|
Author: QMalcolm
|
||||||
|
Issue: "11627"
|
||||||
6
.changes/unreleased/Fixes-20251127-141308.yaml
Normal file
6
.changes/unreleased/Fixes-20251127-141308.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: ":bug: :snowman: Fix long Python stack traces appearing when package dependencies have incompatible version requirements"
|
||||||
|
time: 2025-11-27T14:13:08.082542-05:00
|
||||||
|
custom:
|
||||||
|
Author: emmyoop
|
||||||
|
Issue: "12049"
|
||||||
7
.changes/unreleased/Fixes-20251127-145929.yaml
Normal file
7
.changes/unreleased/Fixes-20251127-145929.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: ':bug: :snowman: Fixed issue where changing data type size/precision/scale (e.g.,
|
||||||
|
varchar(3) to varchar(10)) incorrectly triggered a breaking change error for'
|
||||||
|
time: 2025-11-27T14:59:29.256274-05:00
|
||||||
|
custom:
|
||||||
|
Author: emmyoop
|
||||||
|
Issue: "11186"
|
||||||
6
.changes/unreleased/Fixes-20251127-170124.yaml
Normal file
6
.changes/unreleased/Fixes-20251127-170124.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: ':bug: :snowman: Support unit testing models that depend on sources with the same name'
|
||||||
|
time: 2025-11-27T17:01:24.193516-05:00
|
||||||
|
custom:
|
||||||
|
Author: michelleark
|
||||||
|
Issue: 11975 10433
|
||||||
6
.changes/unreleased/Fixes-20251128-102129.yaml
Normal file
6
.changes/unreleased/Fixes-20251128-102129.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Fix bug in partial parsing when updating a model with a schema file that is referenced by a singular test
|
||||||
|
time: 2025-11-28T10:21:29.911147Z
|
||||||
|
custom:
|
||||||
|
Author: mattogburke
|
||||||
|
Issue: "12223"
|
||||||
6
.changes/unreleased/Fixes-20251128-122838.yaml
Normal file
6
.changes/unreleased/Fixes-20251128-122838.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: ':bug: :snowman: Avoid retrying successful run-operation commands'
|
||||||
|
time: 2025-11-28T12:28:38.546261-05:00
|
||||||
|
custom:
|
||||||
|
Author: michelleark
|
||||||
|
Issue: "11850"
|
||||||
7
.changes/unreleased/Fixes-20251128-161937.yaml
Normal file
7
.changes/unreleased/Fixes-20251128-161937.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: ':bug: :snowman: Fix `dbt deps --add-package` crash when packages.yml contains `warn-unpinned:
|
||||||
|
false`'
|
||||||
|
time: 2025-11-28T16:19:37.608722-05:00
|
||||||
|
custom:
|
||||||
|
Author: emmyoop
|
||||||
|
Issue: "9104"
|
||||||
7
.changes/unreleased/Fixes-20251128-163144.yaml
Normal file
7
.changes/unreleased/Fixes-20251128-163144.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: ':bug: :snowman: Improve `dbt deps --add-package` duplicate detection with better
|
||||||
|
cross-source matching and word boundaries'
|
||||||
|
time: 2025-11-28T16:31:44.344099-05:00
|
||||||
|
custom:
|
||||||
|
Author: emmyoop
|
||||||
|
Issue: "12239"
|
||||||
6
.changes/unreleased/Fixes-20251202-133705.yaml
Normal file
6
.changes/unreleased/Fixes-20251202-133705.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: ':bug: :snowman: Fix false positive deprecation warning of pre/post-hook SQL configs'
|
||||||
|
time: 2025-12-02T13:37:05.012112-05:00
|
||||||
|
custom:
|
||||||
|
Author: michelleark
|
||||||
|
Issue: "12244"
|
||||||
6
.changes/unreleased/Fixes-20251209-175031.yaml
Normal file
6
.changes/unreleased/Fixes-20251209-175031.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Ensure recent deprecation warnings include event name in message
|
||||||
|
time: 2025-12-09T17:50:31.334618-06:00
|
||||||
|
custom:
|
||||||
|
Author: QMalcolm
|
||||||
|
Issue: "12264"
|
||||||
6
.changes/unreleased/Fixes-20251210-143935.yaml
Normal file
6
.changes/unreleased/Fixes-20251210-143935.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Improve error message clarity when detecting nodes with space in name
|
||||||
|
time: 2025-12-10T14:39:35.107841-08:00
|
||||||
|
custom:
|
||||||
|
Author: michelleark
|
||||||
|
Issue: "11835"
|
||||||
6
.changes/unreleased/Under the Hood-20251119-110110.yaml
Normal file
6
.changes/unreleased/Under the Hood-20251119-110110.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Update jsonschemas for schema.yml and dbt_project.yml deprecations
|
||||||
|
time: 2025-11-19T11:01:10.616676-05:00
|
||||||
|
custom:
|
||||||
|
Author: michelleark
|
||||||
|
Issue: "12180"
|
||||||
6
.changes/unreleased/Under the Hood-20251121-140515.yaml
Normal file
6
.changes/unreleased/Under the Hood-20251121-140515.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Replace setuptools and tox with hatch for build, test, and environment management.
|
||||||
|
time: 2025-11-21T14:05:15.838252-05:00
|
||||||
|
custom:
|
||||||
|
Author: emmyoop
|
||||||
|
Issue: "12151"
|
||||||
6
.changes/unreleased/Under the Hood-20251209-131857.yaml
Normal file
6
.changes/unreleased/Under the Hood-20251209-131857.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Add add_catalog_integration call even if we have a pre-existing manifest
|
||||||
|
time: 2025-12-09T13:18:57.043254-08:00
|
||||||
|
custom:
|
||||||
|
Author: colin-rogers-dbt
|
||||||
|
Issue: "12262"
|
||||||
@@ -41,32 +41,26 @@ newlines:
|
|||||||
endOfVersion: 1
|
endOfVersion: 1
|
||||||
|
|
||||||
custom:
|
custom:
|
||||||
- key: Author
|
- key: Author
|
||||||
label: GitHub Username(s) (separated by a single space if multiple)
|
label: GitHub Username(s) (separated by a single space if multiple)
|
||||||
type: string
|
type: string
|
||||||
minLength: 3
|
minLength: 3
|
||||||
- key: Issue
|
- key: Issue
|
||||||
label: GitHub Issue Number (separated by a single space if multiple)
|
label: GitHub Issue Number (separated by a single space if multiple)
|
||||||
type: string
|
type: string
|
||||||
minLength: 1
|
minLength: 1
|
||||||
|
|
||||||
footerFormat: |
|
footerFormat: |
|
||||||
{{- $contributorDict := dict }}
|
{{- $contributorDict := dict }}
|
||||||
{{- /* ensure all names in this list are all lowercase for later matching purposes */}}
|
{{- /* ensure we always skip snyk and dependabot */}}
|
||||||
{{- $core_team := splitList " " .Env.CORE_TEAM }}
|
{{- $bots := list "dependabot[bot]" "snyk-bot"}}
|
||||||
{{- /* ensure we always skip snyk and dependabot in addition to the core team */}}
|
|
||||||
{{- $maintainers := list "dependabot[bot]" "snyk-bot"}}
|
|
||||||
{{- range $team_member := $core_team }}
|
|
||||||
{{- $team_member_lower := lower $team_member }}
|
|
||||||
{{- $maintainers = append $maintainers $team_member_lower }}
|
|
||||||
{{- end }}
|
|
||||||
{{- range $change := .Changes }}
|
{{- range $change := .Changes }}
|
||||||
{{- $authorList := splitList " " $change.Custom.Author }}
|
{{- $authorList := splitList " " $change.Custom.Author }}
|
||||||
{{- /* loop through all authors for a single changelog */}}
|
{{- /* loop through all authors for a single changelog */}}
|
||||||
{{- range $author := $authorList }}
|
{{- range $author := $authorList }}
|
||||||
{{- $authorLower := lower $author }}
|
{{- $authorLower := lower $author }}
|
||||||
{{- /* we only want to include non-core team contributors */}}
|
{{- /* we only want to include non-bot contributors */}}
|
||||||
{{- if not (has $authorLower $maintainers)}}
|
{{- if not (has $authorLower $bots)}}
|
||||||
{{- $changeList := splitList " " $change.Custom.Author }}
|
{{- $changeList := splitList " " $change.Custom.Author }}
|
||||||
{{- $IssueList := list }}
|
{{- $IssueList := list }}
|
||||||
{{- $changeLink := $change.Kind }}
|
{{- $changeLink := $change.Kind }}
|
||||||
|
|||||||
2
.flake8
2
.flake8
@@ -7,8 +7,8 @@ ignore =
|
|||||||
W503 # makes Flake8 work like black
|
W503 # makes Flake8 work like black
|
||||||
W504
|
W504
|
||||||
E203 # makes Flake8 work like black
|
E203 # makes Flake8 work like black
|
||||||
|
E704 # makes Flake8 work like black
|
||||||
E741
|
E741
|
||||||
E501 # long line checking is done in black
|
E501 # long line checking is done in black
|
||||||
exclude = test/
|
|
||||||
per-file-ignores =
|
per-file-ignores =
|
||||||
*/__init__.py: F401
|
*/__init__.py: F401
|
||||||
|
|||||||
4
.github/ISSUE_TEMPLATE/bug-report.yml
vendored
4
.github/ISSUE_TEMPLATE/bug-report.yml
vendored
@@ -61,8 +61,8 @@ body:
|
|||||||
label: Environment
|
label: Environment
|
||||||
description: |
|
description: |
|
||||||
examples:
|
examples:
|
||||||
- **OS**: Ubuntu 20.04
|
- **OS**: Ubuntu 24.04
|
||||||
- **Python**: 3.9.12 (`python3 --version`)
|
- **Python**: 3.10.12 (`python3 --version`)
|
||||||
- **dbt-core**: 1.1.1 (`dbt --version`)
|
- **dbt-core**: 1.1.1 (`dbt --version`)
|
||||||
value: |
|
value: |
|
||||||
- OS:
|
- OS:
|
||||||
|
|||||||
18
.github/ISSUE_TEMPLATE/code-docs.yml
vendored
Normal file
18
.github/ISSUE_TEMPLATE/code-docs.yml
vendored
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
name: 📄 Code docs
|
||||||
|
description: Report an issue for markdown files within this repo, such as README, ARCHITECTURE, etc.
|
||||||
|
title: "[Code docs] <title>"
|
||||||
|
labels: ["triage"]
|
||||||
|
body:
|
||||||
|
- type: markdown
|
||||||
|
attributes:
|
||||||
|
value: |
|
||||||
|
Thanks for taking the time to fill out this code docs issue!
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Please describe the issue and your proposals.
|
||||||
|
description: |
|
||||||
|
Links? References? Anything that will give us more context about the issue you are encountering!
|
||||||
|
|
||||||
|
Tip: You can attach images by clicking this area to highlight it and then dragging files in.
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
18
.github/ISSUE_TEMPLATE/config.yml
vendored
18
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,5 +1,8 @@
|
|||||||
blank_issues_enabled: false
|
blank_issues_enabled: false
|
||||||
contact_links:
|
contact_links:
|
||||||
|
- name: Documentation
|
||||||
|
url: https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose
|
||||||
|
about: Problems and issues with dbt product documentation hosted on docs.getdbt.com. Issues for markdown files within this repo, such as README, should be opened using the "Code docs" template.
|
||||||
- name: Ask the community for help
|
- name: Ask the community for help
|
||||||
url: https://github.com/dbt-labs/docs.getdbt.com/discussions
|
url: https://github.com/dbt-labs/docs.getdbt.com/discussions
|
||||||
about: Need help troubleshooting? Check out our guide on how to ask
|
about: Need help troubleshooting? Check out our guide on how to ask
|
||||||
@@ -9,15 +12,6 @@ contact_links:
|
|||||||
- name: Participate in Discussions
|
- name: Participate in Discussions
|
||||||
url: https://github.com/dbt-labs/dbt-core/discussions
|
url: https://github.com/dbt-labs/dbt-core/discussions
|
||||||
about: Do you have a Big Idea for dbt? Read open discussions, or start a new one
|
about: Do you have a Big Idea for dbt? Read open discussions, or start a new one
|
||||||
- name: Create an issue for dbt-redshift
|
- name: Create an issue for adapters
|
||||||
url: https://github.com/dbt-labs/dbt-redshift/issues/new/choose
|
url: https://github.com/dbt-labs/dbt-adapters/issues/new/choose
|
||||||
about: Report a bug or request a feature for dbt-redshift
|
about: Report a bug or request a feature for an adapter
|
||||||
- name: Create an issue for dbt-bigquery
|
|
||||||
url: https://github.com/dbt-labs/dbt-bigquery/issues/new/choose
|
|
||||||
about: Report a bug or request a feature for dbt-bigquery
|
|
||||||
- name: Create an issue for dbt-snowflake
|
|
||||||
url: https://github.com/dbt-labs/dbt-snowflake/issues/new/choose
|
|
||||||
about: Report a bug or request a feature for dbt-snowflake
|
|
||||||
- name: Create an issue for dbt-spark
|
|
||||||
url: https://github.com/dbt-labs/dbt-spark/issues/new/choose
|
|
||||||
about: Report a bug or request a feature for dbt-spark
|
|
||||||
|
|||||||
4
.github/ISSUE_TEMPLATE/regression-report.yml
vendored
4
.github/ISSUE_TEMPLATE/regression-report.yml
vendored
@@ -55,8 +55,8 @@ body:
|
|||||||
label: Environment
|
label: Environment
|
||||||
description: |
|
description: |
|
||||||
examples:
|
examples:
|
||||||
- **OS**: Ubuntu 20.04
|
- **OS**: Ubuntu 24.04
|
||||||
- **Python**: 3.9.12 (`python3 --version`)
|
- **Python**: 3.10.12 (`python3 --version`)
|
||||||
- **dbt-core (working version)**: 1.1.1 (`dbt --version`)
|
- **dbt-core (working version)**: 1.1.1 (`dbt --version`)
|
||||||
- **dbt-core (regression version)**: 1.2.0 (`dbt --version`)
|
- **dbt-core (regression version)**: 1.2.0 (`dbt --version`)
|
||||||
value: |
|
value: |
|
||||||
|
|||||||
8
.github/_README.md
vendored
8
.github/_README.md
vendored
@@ -120,7 +120,7 @@ Some triggers of note that we use:
|
|||||||
```yaml
|
```yaml
|
||||||
jobs:
|
jobs:
|
||||||
dependency_changelog:
|
dependency_changelog:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ${{ vars.UBUNTU_LATEST }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Get File Name Timestamp
|
- name: Get File Name Timestamp
|
||||||
@@ -188,6 +188,12 @@ ___
|
|||||||
- The [GitHub CLI](https://cli.github.com/) is available in the default runners
|
- The [GitHub CLI](https://cli.github.com/) is available in the default runners
|
||||||
- Actions run in your context. ie, using an action from the marketplace that uses the GITHUB_TOKEN uses the GITHUB_TOKEN generated by your workflow run.
|
- Actions run in your context. ie, using an action from the marketplace that uses the GITHUB_TOKEN uses the GITHUB_TOKEN generated by your workflow run.
|
||||||
|
|
||||||
|
### Runners
|
||||||
|
- We dynamically set runners based on repository vars. Admins can view repository vars and reset them. Current values are the following but are subject to change:
|
||||||
|
- `vars.UBUNTU_LATEST` -> `ubuntu-latest`
|
||||||
|
- `vars.WINDOWS_LATEST` -> `windows-latest`
|
||||||
|
- `vars.MACOS_LATEST` -> `macos-14`
|
||||||
|
|
||||||
### Actions from the Marketplace
|
### Actions from the Marketplace
|
||||||
- Don’t use external actions for things that can easily be accomplished manually.
|
- Don’t use external actions for things that can easily be accomplished manually.
|
||||||
- Always read through what an external action does before using it! Often an action in the GitHub Actions Marketplace can be replaced with a few lines in bash. This is much more maintainable (and won’t change under us) and clear as to what’s actually happening. It also prevents any
|
- Always read through what an external action does before using it! Often an action in the GitHub Actions Marketplace can be replaced with a few lines in bash. This is much more maintainable (and won’t change under us) and clear as to what’s actually happening. It also prevents any
|
||||||
|
|||||||
2
.github/actions/latest-wrangler/README.md
vendored
2
.github/actions/latest-wrangler/README.md
vendored
@@ -33,7 +33,7 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ${{ vars.UBUNTU_LATEST }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v3
|
||||||
- name: Wrangle latest tag
|
- name: Wrangle latest tag
|
||||||
|
|||||||
@@ -3,24 +3,24 @@ on:
|
|||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
package:
|
package:
|
||||||
description: The package to publish
|
description: The package to publish
|
||||||
required: true
|
required: true
|
||||||
version_number:
|
version_number:
|
||||||
description: The version number
|
description: The version number
|
||||||
required: true
|
required: true
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ${{ vars.UBUNTU_LATEST }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v3
|
||||||
- name: Wrangle latest tag
|
- name: Wrangle latest tag
|
||||||
id: is_latest
|
id: is_latest
|
||||||
uses: ./.github/actions/latest-wrangler
|
uses: ./.github/actions/latest-wrangler
|
||||||
with:
|
with:
|
||||||
package: ${{ github.event.inputs.package }}
|
package: ${{ github.event.inputs.package }}
|
||||||
new_version: ${{ github.event.inputs.new_version }}
|
new_version: ${{ github.event.inputs.new_version }}
|
||||||
gh_token: ${{ secrets.GITHUB_TOKEN }}
|
gh_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
- name: Print the results
|
- name: Print the results
|
||||||
run: |
|
run: |
|
||||||
echo "Is it latest? Survey says: ${{ steps.is_latest.outputs.latest }} !"
|
echo "Is it latest? Survey says: ${{ steps.is_latest.outputs.latest }} !"
|
||||||
|
|||||||
5
.github/actions/latest-wrangler/main.py
vendored
5
.github/actions/latest-wrangler/main.py
vendored
@@ -1,9 +1,10 @@
|
|||||||
import os
|
import os
|
||||||
from packaging.version import Version, parse
|
|
||||||
import requests
|
|
||||||
import sys
|
import sys
|
||||||
from typing import List
|
from typing import List
|
||||||
|
|
||||||
|
import requests
|
||||||
|
from packaging.version import Version, parse
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
package_name: str = os.environ["INPUT_PACKAGE_NAME"]
|
package_name: str = os.environ["INPUT_PACKAGE_NAME"]
|
||||||
|
|||||||
10
.github/actions/setup-postgres-linux/action.yml
vendored
10
.github/actions/setup-postgres-linux/action.yml
vendored
@@ -1,10 +0,0 @@
|
|||||||
name: "Set up postgres (linux)"
|
|
||||||
description: "Set up postgres service on linux vm for dbt integration tests"
|
|
||||||
runs:
|
|
||||||
using: "composite"
|
|
||||||
steps:
|
|
||||||
- shell: bash
|
|
||||||
run: |
|
|
||||||
sudo systemctl start postgresql.service
|
|
||||||
pg_isready
|
|
||||||
sudo -u postgres bash ${{ github.action_path }}/setup_db.sh
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
../../../test/setup_db.sh
|
|
||||||
26
.github/actions/setup-postgres-macos/action.yml
vendored
26
.github/actions/setup-postgres-macos/action.yml
vendored
@@ -1,26 +0,0 @@
|
|||||||
name: "Set up postgres (macos)"
|
|
||||||
description: "Set up postgres service on macos vm for dbt integration tests"
|
|
||||||
runs:
|
|
||||||
using: "composite"
|
|
||||||
steps:
|
|
||||||
- shell: bash
|
|
||||||
run: |
|
|
||||||
brew install postgresql@16
|
|
||||||
brew link postgresql@16 --force
|
|
||||||
brew services start postgresql@16
|
|
||||||
echo "Check PostgreSQL service is running"
|
|
||||||
i=10
|
|
||||||
COMMAND='pg_isready'
|
|
||||||
while [ $i -gt -1 ]; do
|
|
||||||
if [ $i == 0 ]; then
|
|
||||||
echo "PostgreSQL service not ready, all attempts exhausted"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
echo "Check PostgreSQL service status"
|
|
||||||
eval $COMMAND && break
|
|
||||||
echo "PostgreSQL service not ready, wait 10 more sec, attempts left: $i"
|
|
||||||
sleep 10
|
|
||||||
((i--))
|
|
||||||
done
|
|
||||||
createuser -s postgres
|
|
||||||
bash ${{ github.action_path }}/setup_db.sh
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
../../../test/setup_db.sh
|
|
||||||
@@ -5,8 +5,22 @@ runs:
|
|||||||
steps:
|
steps:
|
||||||
- shell: pwsh
|
- shell: pwsh
|
||||||
run: |
|
run: |
|
||||||
$pgService = Get-Service -Name postgresql*
|
Write-Host -Object "Installing PostgreSQL 16 as windows service..."
|
||||||
|
$installerArgs = @("--install_runtimes 0", "--superpassword root", "--enable_acledit 1", "--unattendedmodeui none", "--mode unattended")
|
||||||
|
$filePath = Invoke-DownloadWithRetry -Url "https://get.enterprisedb.com/postgresql/postgresql-16.1-1-windows-x64.exe" -Path "$env:PGROOT/postgresql-16.1-1-windows-x64.exe"
|
||||||
|
Start-Process -FilePath $filePath -ArgumentList $installerArgs -Wait -PassThru
|
||||||
|
|
||||||
|
Write-Host -Object "Validating PostgreSQL 16 Install..."
|
||||||
|
Get-Service -Name postgresql*
|
||||||
|
$pgReady = Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru
|
||||||
|
$exitCode = $pgReady.ExitCode
|
||||||
|
if ($exitCode -ne 0) {
|
||||||
|
Write-Host -Object "PostgreSQL is not ready. Exitcode: $exitCode"
|
||||||
|
exit $exitCode
|
||||||
|
}
|
||||||
|
|
||||||
|
Write-Host -Object "Starting PostgreSQL 16 Service..."
|
||||||
|
$pgService = Get-Service -Name postgresql-x64-16
|
||||||
Set-Service -InputObject $pgService -Status running -StartupType automatic
|
Set-Service -InputObject $pgService -Status running -StartupType automatic
|
||||||
Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru
|
|
||||||
$env:Path += ";$env:PGBIN"
|
$env:Path += ";$env:PGBIN"
|
||||||
bash ${{ github.action_path }}/setup_db.sh
|
bash ${{ github.action_path }}/setup_db.sh
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
../../../test/setup_db.sh
|
../../../scripts/setup_db.sh
|
||||||
169
.github/dbt-postgres-testing.yml
vendored
Normal file
169
.github/dbt-postgres-testing.yml
vendored
Normal file
@@ -0,0 +1,169 @@
|
|||||||
|
# **what?**
|
||||||
|
# Runs all tests in dbt-postgres with this branch of dbt-core to ensure nothing is broken
|
||||||
|
|
||||||
|
# **why?**
|
||||||
|
# Ensure dbt-core changes do not break dbt-postgres, as a basic proxy for other adapters
|
||||||
|
|
||||||
|
# **when?**
|
||||||
|
# This will run when trying to merge a PR into main.
|
||||||
|
# It can also be manually triggered.
|
||||||
|
|
||||||
|
# This workflow can be skipped by adding the "Skip Postgres Testing" label to the PR. This is
|
||||||
|
# useful when making a change in both `dbt-postgres` and `dbt-core` where the changes are dependant
|
||||||
|
# and cause the other repository to break.
|
||||||
|
|
||||||
|
name: "dbt-postgres Tests"
|
||||||
|
run-name: >-
|
||||||
|
${{ (github.event_name == 'workflow_dispatch' || github.event_name == 'workflow_call')
|
||||||
|
&& format('dbt-postgres@{0} with dbt-core@{1}', inputs.dbt-postgres-ref, inputs.dbt-core-ref)
|
||||||
|
|| 'dbt-postgres@main with dbt-core branch' }}
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- "main"
|
||||||
|
- "*.latest"
|
||||||
|
- "releases/*"
|
||||||
|
pull_request:
|
||||||
|
merge_group:
|
||||||
|
types: [checks_requested]
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
dbt-postgres-ref:
|
||||||
|
description: "The branch of dbt-postgres to test against"
|
||||||
|
default: "main"
|
||||||
|
dbt-core-ref:
|
||||||
|
description: "The branch of dbt-core to test against"
|
||||||
|
default: "main"
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
dbt-postgres-ref:
|
||||||
|
description: "The branch of dbt-postgres to test against"
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
default: "main"
|
||||||
|
dbt-core-ref:
|
||||||
|
description: "The branch of dbt-core to test against"
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
default: "main"
|
||||||
|
|
||||||
|
permissions: read-all
|
||||||
|
|
||||||
|
# will cancel previous workflows triggered by the same event
|
||||||
|
# and for the same ref for PRs/merges or same SHA otherwise
|
||||||
|
# and for the same inputs on workflow_dispatch or workflow_call
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(fromJson('["pull_request", "merge_group"]'), github.event_name) && github.event.pull_request.head.ref || github.sha }}-${{ contains(fromJson('["workflow_call", "workflow_dispatch"]'), github.event_name) && github.event.inputs.dbt-postgres-ref && github.event.inputs.dbt-core-ref || github.sha }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
job-prep:
|
||||||
|
# This allow us to run the workflow on pull_requests as well so we can always run unit tests
|
||||||
|
# and only run integration tests on merge for time purposes
|
||||||
|
name: Setup Repo Refs
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
dbt-postgres-ref: ${{ steps.core-ref.outputs.ref }}
|
||||||
|
dbt-core-ref: ${{ steps.common-ref.outputs.ref }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: "Input Refs"
|
||||||
|
id: job-inputs
|
||||||
|
run: |
|
||||||
|
echo "inputs.dbt-postgres-ref=${{ inputs.dbt-postgres-ref }}"
|
||||||
|
echo "inputs.dbt-core-ref=${{ inputs.dbt-core-ref }}"
|
||||||
|
|
||||||
|
- name: "Determine dbt-postgres ref"
|
||||||
|
id: core-ref
|
||||||
|
run: |
|
||||||
|
if [[ -z "${{ inputs.dbt-postgres-ref }}" ]]; then
|
||||||
|
REF="main"
|
||||||
|
else
|
||||||
|
REF=${{ inputs.dbt-postgres-ref }}
|
||||||
|
fi
|
||||||
|
echo "ref=$REF" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: "Determine dbt-core ref"
|
||||||
|
id: common-ref
|
||||||
|
run: |
|
||||||
|
if [[ -z "${{ inputs.dbt-core-ref }}" ]]; then
|
||||||
|
# these will be commits instead of branches
|
||||||
|
if [[ "${{ github.event_name }}" == "merge_group" ]]; then
|
||||||
|
REF=${{ github.event.merge_group.head_sha }}
|
||||||
|
else
|
||||||
|
REF=${{ github.event.pull_request.base.sha }}
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
REF=${{ inputs.dbt-core-ref }}
|
||||||
|
fi
|
||||||
|
echo "ref=$REF" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: "Final Refs"
|
||||||
|
run: |
|
||||||
|
echo "dbt-postgres-ref=${{ steps.core-ref.outputs.ref }}"
|
||||||
|
echo "dbt-core-ref=${{ steps.common-ref.outputs.ref }}"
|
||||||
|
|
||||||
|
integration-tests-postgres:
|
||||||
|
name: "dbt-postgres integration tests"
|
||||||
|
needs: [job-prep]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
working-directory: "./dbt-postgres"
|
||||||
|
environment:
|
||||||
|
name: "dbt-postgres"
|
||||||
|
env:
|
||||||
|
POSTGRES_TEST_HOST: ${{ vars.POSTGRES_TEST_HOST }}
|
||||||
|
POSTGRES_TEST_PORT: ${{ vars.POSTGRES_TEST_PORT }}
|
||||||
|
POSTGRES_TEST_USER: ${{ vars.POSTGRES_TEST_USER }}
|
||||||
|
POSTGRES_TEST_PASS: ${{ secrets.POSTGRES_TEST_PASS }}
|
||||||
|
POSTGRES_TEST_DATABASE: ${{ vars.POSTGRES_TEST_DATABASE }}
|
||||||
|
POSTGRES_TEST_THREADS: ${{ vars.POSTGRES_TEST_THREADS }}
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: postgres
|
||||||
|
env:
|
||||||
|
POSTGRES_PASSWORD: postgres
|
||||||
|
options: >-
|
||||||
|
--health-cmd pg_isready
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
ports:
|
||||||
|
- ${{ vars.POSTGRES_TEST_PORT }}:5432
|
||||||
|
steps:
|
||||||
|
- name: "Check out dbt-adapters@${{ needs.job-prep.outputs.dbt-postgres-ref }}"
|
||||||
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||||
|
with:
|
||||||
|
repository: dbt-labs/dbt-adapters
|
||||||
|
ref: ${{ needs.job-prep.outputs.dbt-postgres-ref }}
|
||||||
|
|
||||||
|
- name: "Set up Python"
|
||||||
|
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: ${{ inputs.python-version }}
|
||||||
|
|
||||||
|
- name: "Set environment variables"
|
||||||
|
run: |
|
||||||
|
echo "HATCH_PYTHON=${{ inputs.python-version }}" >> $GITHUB_ENV
|
||||||
|
echo "PIP_ONLY_BINARY=psycopg2-binary" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: "Setup test database"
|
||||||
|
run: psql -f ./scripts/setup_test_database.sql
|
||||||
|
env:
|
||||||
|
PGHOST: ${{ vars.POSTGRES_TEST_HOST }}
|
||||||
|
PGPORT: ${{ vars.POSTGRES_TEST_PORT }}
|
||||||
|
PGUSER: postgres
|
||||||
|
PGPASSWORD: postgres
|
||||||
|
PGDATABASE: postgres
|
||||||
|
|
||||||
|
- name: "Install hatch"
|
||||||
|
uses: pypa/hatch@257e27e51a6a5616ed08a39a408a21c35c9931bc # pypa/hatch@install
|
||||||
|
|
||||||
|
- name: "Run integration tests"
|
||||||
|
run: hatch run ${{ inputs.hatch-env }}:integration-tests
|
||||||
14
.github/pull_request_template.md
vendored
14
.github/pull_request_template.md
vendored
@@ -1,7 +1,7 @@
|
|||||||
resolves #
|
Resolves #
|
||||||
|
|
||||||
<!---
|
<!---
|
||||||
Include the number of the issue addressed by this PR above if applicable.
|
Include the number of the issue addressed by this PR above, if applicable.
|
||||||
PRs for code changes without an associated issue *will not be merged*.
|
PRs for code changes without an associated issue *will not be merged*.
|
||||||
See CONTRIBUTING.md for more information.
|
See CONTRIBUTING.md for more information.
|
||||||
|
|
||||||
@@ -26,8 +26,8 @@ resolves #
|
|||||||
|
|
||||||
### Checklist
|
### Checklist
|
||||||
|
|
||||||
- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me
|
- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me.
|
||||||
- [ ] I have run this code in development and it appears to resolve the stated issue
|
- [ ] I have run this code in development, and it appears to resolve the stated issue.
|
||||||
- [ ] This PR includes tests, or tests are not required/relevant for this PR
|
- [ ] This PR includes tests, or tests are not required or relevant for this PR.
|
||||||
- [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc) or this PR has already received feedback and approval from Product or DX
|
- [ ] This PR has no interface changes (e.g., macros, CLI, logs, JSON artifacts, config files, adapter interface, etc.) or this PR has already received feedback and approval from Product or DX.
|
||||||
- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions
|
- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions.
|
||||||
|
|||||||
186
.github/workflows/artifact-reviews.yml
vendored
Normal file
186
.github/workflows/artifact-reviews.yml
vendored
Normal file
@@ -0,0 +1,186 @@
|
|||||||
|
# **what?**
|
||||||
|
# Enforces 2 reviews when artifact or validation files are modified.
|
||||||
|
|
||||||
|
# **why?**
|
||||||
|
# Ensure artifact changes receive proper review from designated team members. GitHub doesn't support
|
||||||
|
# multiple reviews on a single PR based on files changed, so we need to enforce this manually.
|
||||||
|
|
||||||
|
# **when?**
|
||||||
|
# This will run when reviews are submitted and dismissed.
|
||||||
|
|
||||||
|
name: "Enforce Additional Reviews on Artifact and Validations Changes"
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
checks: write
|
||||||
|
pull-requests: write
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
on:
|
||||||
|
# trigger check on review events. use pull_request_target for forks.
|
||||||
|
pull_request_target:
|
||||||
|
types: [opened, reopened, ready_for_review, synchronize, review_requested]
|
||||||
|
pull_request_review:
|
||||||
|
types: [submitted, edited, dismissed]
|
||||||
|
|
||||||
|
# only run this once per PR at a time
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
env:
|
||||||
|
required_approvals: 2
|
||||||
|
team: "core-group"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
check-reviews:
|
||||||
|
name: "Validate Additional Reviews"
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: "Get list of changed files"
|
||||||
|
id: changed_files
|
||||||
|
run: |
|
||||||
|
# Fetch files as JSON and process with jq to sanitize output
|
||||||
|
gh api repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/files \
|
||||||
|
| jq -r '.[].filename' \
|
||||||
|
| while IFS= read -r file; do
|
||||||
|
# Sanitize the filename by removing any special characters and command injection attempts
|
||||||
|
clean_file=$(echo "$file" | sed 's/[^a-zA-Z0-9\.\/\-_]//g')
|
||||||
|
echo "$clean_file"
|
||||||
|
done > changed_files.txt
|
||||||
|
echo "CHANGED_FILES<<EOF" >> $GITHUB_OUTPUT
|
||||||
|
cat changed_files.txt >> $GITHUB_OUTPUT
|
||||||
|
echo "EOF" >> $GITHUB_OUTPUT
|
||||||
|
env:
|
||||||
|
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: "Check if any artifact files changed"
|
||||||
|
id: artifact_files_changed
|
||||||
|
run: |
|
||||||
|
artifact_changes=false
|
||||||
|
while IFS= read -r file; do
|
||||||
|
# Only process if file path looks legitimate
|
||||||
|
if [[ "$file" =~ ^[a-zA-Z0-9\.\/\-_]+$ ]]; then
|
||||||
|
if [[ "$file" == "core/dbt/artifacts/"* ]] ; then
|
||||||
|
artifact_changes=true
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
done < changed_files.txt
|
||||||
|
echo "artifact_changes=$artifact_changes" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: "Get Core Team Members"
|
||||||
|
if: steps.artifact_files_changed.outputs.artifact_changes == 'true'
|
||||||
|
id: core_members
|
||||||
|
run: |
|
||||||
|
gh api -H "Accept: application/vnd.github+json" \
|
||||||
|
/orgs/dbt-labs/teams/${{ env.team }}/members > core_members.json
|
||||||
|
|
||||||
|
# Extract usernames and set as multiline output
|
||||||
|
echo "membership<<EOF" >> $GITHUB_OUTPUT
|
||||||
|
jq -r '.[].login' core_members.json >> $GITHUB_OUTPUT
|
||||||
|
echo "EOF" >> $GITHUB_OUTPUT
|
||||||
|
env:
|
||||||
|
GH_TOKEN: ${{ secrets.IT_TEAM_MEMBERSHIP }}
|
||||||
|
|
||||||
|
- name: "Verify ${{ env.required_approvals }} core team approvals"
|
||||||
|
if: steps.artifact_files_changed.outputs.artifact_changes == 'true'
|
||||||
|
id: check_approvals
|
||||||
|
run: |
|
||||||
|
|
||||||
|
# Get all reviews
|
||||||
|
REVIEWS=$(gh api repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/reviews)
|
||||||
|
echo "All reviews:"
|
||||||
|
echo "$REVIEWS"
|
||||||
|
# Count approved reviews from core team members (only most recent review per user)
|
||||||
|
CORE_APPROVALS=0
|
||||||
|
while IFS= read -r member; do
|
||||||
|
echo "Checking member: $member"
|
||||||
|
APPROVED=$(echo "$REVIEWS" | jq --arg user "$member" '
|
||||||
|
group_by(.user.login) |
|
||||||
|
map(select(.[0].user.login == $user) |
|
||||||
|
sort_by(.submitted_at) |
|
||||||
|
last) |
|
||||||
|
map(select(.state == "APPROVED" and (.state != "DISMISSED"))) |
|
||||||
|
length')
|
||||||
|
echo "Latest review state for $member: $APPROVED"
|
||||||
|
CORE_APPROVALS=$((CORE_APPROVALS + APPROVED))
|
||||||
|
echo "Running total: $CORE_APPROVALS"
|
||||||
|
done <<< "${{ steps.core_members.outputs.membership }}"
|
||||||
|
|
||||||
|
echo "CORE_APPROVALS=$CORE_APPROVALS" >> $GITHUB_OUTPUT
|
||||||
|
echo "CORE_APPROVALS=$CORE_APPROVALS"
|
||||||
|
env:
|
||||||
|
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: "Find Comment"
|
||||||
|
if: steps.artifact_files_changed.outputs.artifact_changes == 'true' && steps.check_approvals.outputs.CORE_APPROVALS < env.required_approvals
|
||||||
|
uses: peter-evans/find-comment@a54c31d7fa095754bfef525c0c8e5e5674c4b4b1 # peter-evans/find-comment@v2
|
||||||
|
id: find-comment
|
||||||
|
with:
|
||||||
|
issue-number: ${{ github.event.pull_request.number }}
|
||||||
|
comment-author: 'github-actions[bot]'
|
||||||
|
body-includes: "### Additional Artifact Review Required"
|
||||||
|
|
||||||
|
- name: "Create Comment"
|
||||||
|
if: steps.artifact_files_changed.outputs.artifact_changes == 'true' && steps.find-comment.outputs.comment-id == '' && steps.check_approvals.outputs.CORE_APPROVALS < env.required_approvals
|
||||||
|
uses: peter-evans/create-or-update-comment@23ff15729ef2fc348714a3bb66d2f655ca9066f2 # peter-evans/create-or-update-comment@v3
|
||||||
|
with:
|
||||||
|
issue-number: ${{ github.event.pull_request.number }}
|
||||||
|
body: |
|
||||||
|
### Additional Artifact Review Required
|
||||||
|
|
||||||
|
Changes to artifact directory files requires at least ${{ env.required_approvals }} approvals from core team members.
|
||||||
|
|
||||||
|
- name: "Notify if not enough approvals"
|
||||||
|
if: steps.artifact_files_changed.outputs.artifact_changes == 'true'
|
||||||
|
run: |
|
||||||
|
if [[ "${{ steps.check_approvals.outputs.CORE_APPROVALS }}" -ge "${{ env.required_approvals }}" ]]; then
|
||||||
|
title="Extra requirements met"
|
||||||
|
message="Changes to artifact directory files requires at least ${{ env.required_approvals }} approvals from core team members. Current number of core team approvals: ${{ steps.check_approvals.outputs.CORE_APPROVALS }} "
|
||||||
|
echo "::notice title=$title::$message"
|
||||||
|
echo "REVIEW_STATUS=success" >> $GITHUB_OUTPUT
|
||||||
|
else
|
||||||
|
title="PR Approval Requirements Not Met"
|
||||||
|
message="Changes to artifact directory files requires at least ${{ env.required_approvals }} approvals from core team members. Current number of core team approvals: ${{ steps.check_approvals.outputs.CORE_APPROVALS }} "
|
||||||
|
echo "::notice title=$title::$message"
|
||||||
|
echo "REVIEW_STATUS=neutral" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
id: review_check
|
||||||
|
|
||||||
|
- name: "Set check status"
|
||||||
|
id: status_check
|
||||||
|
run: |
|
||||||
|
if [[ "${{ steps.artifact_files_changed.outputs.artifact_changes }}" == 'false' ]]; then
|
||||||
|
# no extra review required
|
||||||
|
echo "current_status=success" >> $GITHUB_OUTPUT
|
||||||
|
elif [[ "${{ steps.review_check.outputs.REVIEW_STATUS }}" == "success" ]]; then
|
||||||
|
# we have all the required reviews
|
||||||
|
echo "current_status=success" >> $GITHUB_OUTPUT
|
||||||
|
else
|
||||||
|
# neutral exit - neither success nor failure
|
||||||
|
# we can't fail here because we use multiple triggers for this workflow and they won't reset the check
|
||||||
|
# workaround is to use a neutral exit to skip the check run until it's actually successful
|
||||||
|
echo "current_status=neutral" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: "Post Event"
|
||||||
|
# This step posts the status of the check because the workflow is triggered by multiple events
|
||||||
|
# and we need to ensure the check is always updated. Otherwise we would end up with duplicate
|
||||||
|
# checks in the GitHub UI.
|
||||||
|
run: |
|
||||||
|
if [[ "${{ steps.status_check.outputs.current_status }}" == "success" ]]; then
|
||||||
|
state="success"
|
||||||
|
else
|
||||||
|
state="failure"
|
||||||
|
fi
|
||||||
|
|
||||||
|
gh api \
|
||||||
|
--method POST \
|
||||||
|
-H "Accept: application/vnd.github+json" \
|
||||||
|
/repos/${{ github.repository }}/statuses/${{ github.event.pull_request.base.sha }} \
|
||||||
|
-f state="$state" \
|
||||||
|
-f description="Artifact Review Check" \
|
||||||
|
-f context="Artifact Review Check" \
|
||||||
|
-f target_url="${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
|
||||||
|
env:
|
||||||
|
GH_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
|
||||||
50
.github/workflows/auto-respond-bug-reports.yml
vendored
Normal file
50
.github/workflows/auto-respond-bug-reports.yml
vendored
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
# **what?**
|
||||||
|
# Check if the an issue is opened near or during an extended holiday period.
|
||||||
|
# If so, post an automatically-generated comment about the holiday for bug reports.
|
||||||
|
# Also provide specific information to customers of dbt Cloud.
|
||||||
|
|
||||||
|
# **why?**
|
||||||
|
# Explain why responses will be delayed during our holiday period.
|
||||||
|
|
||||||
|
# **when?**
|
||||||
|
# This will run when new issues are opened.
|
||||||
|
|
||||||
|
name: Auto-Respond to Bug Reports During Holiday Period
|
||||||
|
|
||||||
|
on:
|
||||||
|
issues:
|
||||||
|
types:
|
||||||
|
- opened
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
issues: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
auto-response:
|
||||||
|
runs-on: ${{ vars.UBUNTU_LATEST }}
|
||||||
|
steps:
|
||||||
|
- name: Check if current date is within holiday period
|
||||||
|
id: date-check
|
||||||
|
run: |
|
||||||
|
current_date=$(date -u +"%Y-%m-%d")
|
||||||
|
start_date="2024-12-23"
|
||||||
|
end_date="2025-01-05"
|
||||||
|
|
||||||
|
if [[ "$current_date" < "$start_date" || "$current_date" > "$end_date" ]]; then
|
||||||
|
echo "outside_holiday=true" >> $GITHUB_ENV
|
||||||
|
else
|
||||||
|
echo "outside_holiday=false" >> $GITHUB_ENV
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Post comment
|
||||||
|
if: ${{ env.outside_holiday == 'false' && contains(github.event.issue.labels.*.name, 'bug') }}
|
||||||
|
run: |
|
||||||
|
gh issue comment ${{ github.event.issue.number }} --repo ${{ github.repository }} --body "Thank you for your bug report! Our team is will be out of the office for [Christmas and our Global Week of Rest](https://handbook.getdbt.com/docs/time_off#2024-us-holidays), from December 25, 2024, through January 3, 2025.
|
||||||
|
|
||||||
|
We will review your issue as soon as possible after returning.
|
||||||
|
Thank you for your understanding, and happy holidays! 🎄🎉
|
||||||
|
|
||||||
|
If you are a customer of dbt Cloud, please contact our Customer Support team via the dbt Cloud web interface or email **support@dbtlabs.com**."
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
4
.github/workflows/backport.yml
vendored
4
.github/workflows/backport.yml
vendored
@@ -28,13 +28,13 @@ permissions:
|
|||||||
jobs:
|
jobs:
|
||||||
backport:
|
backport:
|
||||||
name: Backport
|
name: Backport
|
||||||
runs-on: ubuntu-latest
|
runs-on: ${{ vars.UBUNTU_LATEST }}
|
||||||
# Only react to merged PRs for security reasons.
|
# Only react to merged PRs for security reasons.
|
||||||
# See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
|
# See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
|
||||||
if: >
|
if: >
|
||||||
github.event.pull_request.merged
|
github.event.pull_request.merged
|
||||||
&& contains(github.event.label.name, 'backport')
|
&& contains(github.event.label.name, 'backport')
|
||||||
steps:
|
steps:
|
||||||
- uses: tibdex/backport@v2.0.4
|
- uses: tibdex/backport@9565281eda0731b1d20c4025c43339fb0a23812e # tibdex/backport@v2.0.4
|
||||||
with:
|
with:
|
||||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|||||||
4
.github/workflows/bot-changelog.yml
vendored
4
.github/workflows/bot-changelog.yml
vendored
@@ -41,14 +41,14 @@ jobs:
|
|||||||
include:
|
include:
|
||||||
- label: "dependencies"
|
- label: "dependencies"
|
||||||
changie_kind: "Dependencies"
|
changie_kind: "Dependencies"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ${{ vars.UBUNTU_LATEST }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
|
|
||||||
- name: Create and commit changelog on bot PR
|
- name: Create and commit changelog on bot PR
|
||||||
if: ${{ contains(github.event.pull_request.labels.*.name, matrix.label) }}
|
if: ${{ contains(github.event.pull_request.labels.*.name, matrix.label) }}
|
||||||
id: bot_changelog
|
id: bot_changelog
|
||||||
uses: emmyoop/changie_bot@v1.1.0
|
uses: emmyoop/changie_bot@22b70618b13d0d1c64ea95212bafca2d2bf6b764 # emmyoop/changie_bot@v1.1.0
|
||||||
with:
|
with:
|
||||||
GITHUB_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
|
GITHUB_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
|
||||||
commit_author_name: "Github Build Bot"
|
commit_author_name: "Github Build Bot"
|
||||||
|
|||||||
14
.github/workflows/check-artifact-changes.yml
vendored
14
.github/workflows/check-artifact-changes.yml
vendored
@@ -4,22 +4,26 @@ on:
|
|||||||
pull_request:
|
pull_request:
|
||||||
types: [ opened, reopened, labeled, unlabeled, synchronize ]
|
types: [ opened, reopened, labeled, unlabeled, synchronize ]
|
||||||
paths-ignore: [ '.changes/**', '.github/**', 'tests/**', '**.md', '**.yml' ]
|
paths-ignore: [ '.changes/**', '.github/**', 'tests/**', '**.md', '**.yml' ]
|
||||||
|
merge_group:
|
||||||
|
types: [checks_requested]
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
check-artifact-changes:
|
check-artifact-changes:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ${{ vars.UBUNTU_LATEST }}
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'artifact_minor_upgrade') }}
|
if: ${{ !contains(github.event.pull_request.labels.*.name, 'artifact_minor_upgrade') }}
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Check for changes in core/dbt/artifacts
|
- name: Check for changes in core/dbt/artifacts
|
||||||
# https://github.com/marketplace/actions/paths-changes-filter
|
# https://github.com/marketplace/actions/paths-changes-filter
|
||||||
uses: dorny/paths-filter@v3
|
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # dorny/paths-filter@v3
|
||||||
id: check_artifact_changes
|
id: check_artifact_changes
|
||||||
with:
|
with:
|
||||||
filters: |
|
filters: |
|
||||||
@@ -32,7 +36,7 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
echo "CI failure: Artifact changes checked in core/dbt/artifacts directory."
|
echo "CI failure: Artifact changes checked in core/dbt/artifacts directory."
|
||||||
echo "Files changed: ${{ steps.check_artifact_changes.outputs.artifacts_changed_files }}"
|
echo "Files changed: ${{ steps.check_artifact_changes.outputs.artifacts_changed_files }}"
|
||||||
echo "To bypass this check, confirm that the change is not breaking (https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/artifacts/README.md#breaking-changes) and add the 'artifact_minor_upgrade' label to the PR."
|
echo "To bypass this check, confirm that the change is not breaking (https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/artifacts/README.md#breaking-changes) and add the 'artifact_minor_upgrade' label to the PR. Modifications and additions to all fields require updates to https://github.com/dbt-labs/dbt-jsonschema."
|
||||||
exit 1
|
exit 1
|
||||||
|
|
||||||
- name: CI check passed
|
- name: CI check passed
|
||||||
|
|||||||
13
.github/workflows/community-label.yml
vendored
13
.github/workflows/community-label.yml
vendored
@@ -7,7 +7,6 @@
|
|||||||
# **when?**
|
# **when?**
|
||||||
# When a PR is opened, not in draft or moved from draft to ready for review
|
# When a PR is opened, not in draft or moved from draft to ready for review
|
||||||
|
|
||||||
|
|
||||||
name: Label community PRs
|
name: Label community PRs
|
||||||
|
|
||||||
on:
|
on:
|
||||||
@@ -29,9 +28,15 @@ jobs:
|
|||||||
# If this PR is opened and not draft, determine if it needs to be labeled
|
# If this PR is opened and not draft, determine if it needs to be labeled
|
||||||
# if the PR is converted out of draft, determine if it needs to be labeled
|
# if the PR is converted out of draft, determine if it needs to be labeled
|
||||||
if: |
|
if: |
|
||||||
(!contains(github.event.pull_request.labels.*.name, 'community') &&
|
(
|
||||||
(github.event.action == 'opened' && github.event.pull_request.draft == false ) ||
|
!contains(github.event.pull_request.labels.*.name, 'community')
|
||||||
github.event.action == 'ready_for_review' )
|
&& (
|
||||||
|
(github.event.action == 'opened' && github.event.pull_request.draft == false)
|
||||||
|
|| github.event.action == 'ready_for_review'
|
||||||
|
)
|
||||||
|
&& github.event.pull_request.user.type != 'Bot'
|
||||||
|
&& github.event.pull_request.user.login != 'dependabot[bot]'
|
||||||
|
)
|
||||||
uses: dbt-labs/actions/.github/workflows/label-community.yml@main
|
uses: dbt-labs/actions/.github/workflows/label-community.yml@main
|
||||||
with:
|
with:
|
||||||
github_team: 'core-group'
|
github_team: 'core-group'
|
||||||
|
|||||||
388
.github/workflows/cut-release-branch.yml
vendored
388
.github/workflows/cut-release-branch.yml
vendored
@@ -1,25 +1,44 @@
|
|||||||
# **what?**
|
# **what?**
|
||||||
# Cuts a new `*.latest` branch
|
# Cuts the `*.latest` branch, bumps dependencies on it, cleans up all files in `.changes/unreleased`
|
||||||
# Also cleans up all files in `.changes/unreleased` and `.changes/previous verion on
|
# and `.changes/previous verion on main and bumps main to the input version.
|
||||||
# `main` and bumps `main` to the input version.
|
|
||||||
|
|
||||||
# **why?**
|
# **why?**
|
||||||
# Generally reduces the workload of engineers and reduces error. Allow automation.
|
# Clean up the main branch after a release branch is cut and automate cutting the release branch.
|
||||||
|
# Generally reduces the workload of engineers and reducing error.
|
||||||
|
|
||||||
# **when?**
|
# **when?**
|
||||||
# This will run when called manually.
|
# This will run when called manually or when triggered in another workflow.
|
||||||
|
|
||||||
|
# Example Usage including required permissions: TODO: update once finalized
|
||||||
|
|
||||||
|
# permissions:
|
||||||
|
# contents: read
|
||||||
|
# pull-requests: write
|
||||||
|
#
|
||||||
|
# name: Cut Release Branch
|
||||||
|
# jobs:
|
||||||
|
# changelog:
|
||||||
|
# uses: dbt-labs/actions/.github/workflows/cut-release-branch.yml@main
|
||||||
|
# with:
|
||||||
|
# new_branch_name: 1.7.latest
|
||||||
|
# PR_title: "Cleanup main after cutting new 1.7.latest branch"
|
||||||
|
# PR_body: "All adapter PRs will fail CI until the dbt-core PR has been merged due to release version conflicts."
|
||||||
|
# secrets:
|
||||||
|
# FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }}
|
||||||
|
|
||||||
|
# TODOs
|
||||||
|
# add note to eventually commit changes directly and bypass checks - same as release - when we move to this model run test action after merge
|
||||||
|
|
||||||
name: Cut new release branch
|
name: Cut new release branch
|
||||||
|
run-name: "Cutting New Branch: ${{ inputs.new_branch_name }}"
|
||||||
|
|
||||||
on:
|
on:
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
version_to_bump_main:
|
|
||||||
description: 'The alpha version main should bump to (ex. 1.6.0a1)'
|
|
||||||
required: true
|
|
||||||
new_branch_name:
|
new_branch_name:
|
||||||
description: 'The full name of the new branch (ex. 1.5.latest)'
|
description: "The full name of the new branch (ex. 1.5.latest)"
|
||||||
required: true
|
required: true
|
||||||
|
type: string
|
||||||
|
|
||||||
defaults:
|
defaults:
|
||||||
run:
|
run:
|
||||||
@@ -27,15 +46,346 @@ defaults:
|
|||||||
|
|
||||||
permissions:
|
permissions:
|
||||||
contents: write
|
contents: write
|
||||||
|
pull-requests: write
|
||||||
|
|
||||||
|
env:
|
||||||
|
PYTHON_TARGET_VERSION: "3.10"
|
||||||
|
PR_TITLE: "Cleanup main after cutting new ${{ inputs.new_branch_name }} branch"
|
||||||
|
PR_BODY: "All adapter PRs will fail CI until the dbt-core PR has been merged due to release version conflicts."
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
cut_branch:
|
prep_work:
|
||||||
name: "Cut branch and clean up main for dbt-core"
|
name: "Prep Work"
|
||||||
uses: dbt-labs/actions/.github/workflows/cut-release-branch.yml@main
|
runs-on: ubuntu-latest
|
||||||
with:
|
steps:
|
||||||
version_to_bump_main: ${{ inputs.version_to_bump_main }}
|
- name: "[DEBUG] Print Inputs"
|
||||||
new_branch_name: ${{ inputs.new_branch_name }}
|
run: |
|
||||||
PR_title: "Cleanup main after cutting new ${{ inputs.new_branch_name }} branch"
|
echo "new_branch_name: ${{ inputs.new_branch_name }}"
|
||||||
PR_body: "All adapter PRs will fail CI until the dbt-core PR has been merged due to release version conflicts."
|
echo "PR_title: ${{ env.PR_TITLE }}"
|
||||||
secrets:
|
echo "PR_body: ${{ env.PR_BODY }}"
|
||||||
FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }}
|
|
||||||
|
create_temp_branch:
|
||||||
|
name: "Create Temp branch off main"
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
temp_branch_name: ${{ steps.variables.outputs.BRANCH_NAME }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: "Set Branch Value"
|
||||||
|
id: variables
|
||||||
|
run: |
|
||||||
|
echo "BRANCH_NAME=cutting_release_branch/main_cleanup_$GITHUB_RUN_ID" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: "Checkout ${{ github.repository }}"
|
||||||
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||||
|
with:
|
||||||
|
ref: "main"
|
||||||
|
token: ${{ secrets.FISHTOWN_BOT_PAT }}
|
||||||
|
|
||||||
|
- name: "Create PR Branch"
|
||||||
|
run: |
|
||||||
|
user="Github Build Bot"
|
||||||
|
email="buildbot@fishtownanalytics.com"
|
||||||
|
git config user.name "$user"
|
||||||
|
git config user.email "$email"
|
||||||
|
git checkout -b ${{ steps.variables.outputs.BRANCH_NAME }}
|
||||||
|
git push --set-upstream origin ${{ steps.variables.outputs.BRANCH_NAME }}
|
||||||
|
|
||||||
|
- name: "[Notification] Temp branch created"
|
||||||
|
run: |
|
||||||
|
message="Temp branch ${{ steps.variables.outputs.BRANCH_NAME }} created"
|
||||||
|
echo "::notice title="Temporary branch created": $title::$message"
|
||||||
|
|
||||||
|
cleanup_changelog:
|
||||||
|
name: "Clean Up Changelog"
|
||||||
|
needs: ["create_temp_branch"]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
next-version: ${{ steps.semver-current.outputs.next-minor-alpha-version }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: "Checkout ${{ github.repository }}"
|
||||||
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||||
|
with:
|
||||||
|
ref: ${{ needs.create_temp_branch.outputs.temp_branch_name }}
|
||||||
|
token: ${{ secrets.FISHTOWN_BOT_PAT }}
|
||||||
|
|
||||||
|
- name: "Add Homebrew To PATH"
|
||||||
|
run: |
|
||||||
|
echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
|
||||||
|
|
||||||
|
- name: "Install Homebrew Packages"
|
||||||
|
run: |
|
||||||
|
brew install pre-commit
|
||||||
|
brew tap miniscruff/changie https://github.com/miniscruff/changie
|
||||||
|
brew install changie
|
||||||
|
|
||||||
|
- name: "Check Current Version In Code"
|
||||||
|
id: determine_version
|
||||||
|
run: |
|
||||||
|
current_version=$(grep '^version = ' core/pyproject.toml | sed 's/version = "\(.*\)"/\1/')
|
||||||
|
echo "current_version=$current_version" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: "[Notification] Check Current Version In Code"
|
||||||
|
run: |
|
||||||
|
message="The current version is ${{ steps.determine_version.outputs.current_version }}"
|
||||||
|
echo "::notice title="Version Bump Check": $title::$message"
|
||||||
|
|
||||||
|
- name: "Parse Current Version Into Parts for Changelog Directories"
|
||||||
|
id: semver-current
|
||||||
|
uses: dbt-labs/actions/parse-semver@main
|
||||||
|
with:
|
||||||
|
version: ${{ steps.determine_version.outputs.current_version }}
|
||||||
|
|
||||||
|
- name: "[Notification] Next Alpha Version"
|
||||||
|
run: |
|
||||||
|
message="The next alpha version is ${{ steps.semver-current.outputs.next-minor-alpha-version }}"
|
||||||
|
echo "::notice title="Version Bump Check": $title::$message"
|
||||||
|
|
||||||
|
- name: "Delete Unreleased Changelog YAMLs"
|
||||||
|
# removal fails if no files exist. OK to continue since we're just cleaning up the files.
|
||||||
|
continue-on-error: true
|
||||||
|
run: |
|
||||||
|
rm .changes/unreleased/*.yaml || true
|
||||||
|
|
||||||
|
- name: "Delete Pre Release Changelogs and YAMLs"
|
||||||
|
# removal fails if no files exist. OK to continue since we're just cleaning up the files.
|
||||||
|
continue-on-error: true
|
||||||
|
run: |
|
||||||
|
rm .changes/${{ steps.semver-current.outputs.base-version }}/*.yaml || true
|
||||||
|
rm .changes/${{ steps.semver-current.outputs.major }}.${{ steps.semver-current.outputs.minor }}.*.md || true
|
||||||
|
|
||||||
|
- name: "Cleanup CHANGELOG.md"
|
||||||
|
run: |
|
||||||
|
changie merge
|
||||||
|
|
||||||
|
- name: "Commit Changelog Cleanup to Branch"
|
||||||
|
run: |
|
||||||
|
user="Github Build Bot"
|
||||||
|
email="buildbot@fishtownanalytics.com"
|
||||||
|
git config user.name "$user"
|
||||||
|
git config user.email "$email"
|
||||||
|
git status
|
||||||
|
git add .
|
||||||
|
git commit -m "Clean up changelog on main"
|
||||||
|
git push
|
||||||
|
|
||||||
|
- name: "[Notification] Changelog cleaned up"
|
||||||
|
run: |
|
||||||
|
message="Changelog on ${{ needs.create_temp_branch.outputs.temp_branch_name }} cleaned up"
|
||||||
|
echo "::notice title="Changelog cleaned up": $title::$message"
|
||||||
|
|
||||||
|
bump_version:
|
||||||
|
name: "Bump to next minor version"
|
||||||
|
needs: ["cleanup_changelog", "create_temp_branch"]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: "Checkout ${{ github.repository }}"
|
||||||
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||||
|
with:
|
||||||
|
ref: ${{ needs.create_temp_branch.outputs.temp_branch_name }}
|
||||||
|
token: ${{ secrets.FISHTOWN_BOT_PAT }}
|
||||||
|
|
||||||
|
- name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}"
|
||||||
|
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: "${{ env.PYTHON_TARGET_VERSION }}"
|
||||||
|
|
||||||
|
- name: "Install Spark Dependencies"
|
||||||
|
if: ${{ contains(github.repository, 'dbt-labs/dbt-spark') }}
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install libsasl2-dev
|
||||||
|
|
||||||
|
- name: "Install Python Dependencies"
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
python -m pip install hatch
|
||||||
|
|
||||||
|
- name: "Bump Version To ${{ needs.cleanup_changelog.outputs.next-version }}"
|
||||||
|
run: |
|
||||||
|
cd core
|
||||||
|
hatch version ${{ needs.cleanup_changelog.outputs.next-version }}
|
||||||
|
hatch run dev-req
|
||||||
|
dbt --version
|
||||||
|
|
||||||
|
- name: "Commit Version Bump to Branch"
|
||||||
|
run: |
|
||||||
|
user="Github Build Bot"
|
||||||
|
email="buildbot@fishtownanalytics.com"
|
||||||
|
git config user.name "$user"
|
||||||
|
git config user.email "$email"
|
||||||
|
git status
|
||||||
|
git add .
|
||||||
|
git commit -m "Bumping version to ${{ needs.cleanup_changelog.outputs.next-version }}"
|
||||||
|
git push
|
||||||
|
|
||||||
|
- name: "[Notification] Version Bump completed"
|
||||||
|
run: |
|
||||||
|
message="Version on ${{ needs.create_temp_branch.outputs.temp_branch_name }} bumped to ${{ needs.cleanup_changelog.outputs.next-version }}"
|
||||||
|
echo "::notice title="Version Bump Completed": $title::$message"
|
||||||
|
|
||||||
|
cleanup:
|
||||||
|
name: "Cleanup Code Quality"
|
||||||
|
needs: ["create_temp_branch", "bump_version"]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: "Checkout ${{ github.repository }}"
|
||||||
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||||
|
with:
|
||||||
|
ref: ${{ needs.create_temp_branch.outputs.temp_branch_name }}
|
||||||
|
token: ${{ secrets.FISHTOWN_BOT_PAT }}
|
||||||
|
|
||||||
|
- name: "Add Homebrew To PATH"
|
||||||
|
run: |
|
||||||
|
echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
|
||||||
|
|
||||||
|
- name: "brew install pre-commit"
|
||||||
|
run: |
|
||||||
|
brew install pre-commit
|
||||||
|
|
||||||
|
# this step will fail on whitespace errors but also correct them
|
||||||
|
- name: "Cleanup - Remove Trailing Whitespace Via Pre-commit"
|
||||||
|
continue-on-error: true
|
||||||
|
run: |
|
||||||
|
pre-commit run trailing-whitespace --files CHANGELOG.md .changes/* || true
|
||||||
|
|
||||||
|
# this step will fail on newline errors but also correct them
|
||||||
|
- name: "Cleanup - Remove Extra Newlines Via Pre-commit"
|
||||||
|
continue-on-error: true
|
||||||
|
run: |
|
||||||
|
pre-commit run end-of-file-fixer --files CHANGELOG.md .changes/* || true
|
||||||
|
|
||||||
|
- name: "Commit Version Bump to Branch"
|
||||||
|
run: |
|
||||||
|
user="Github Build Bot"
|
||||||
|
email="buildbot@fishtownanalytics.com"
|
||||||
|
git config user.name "$user"
|
||||||
|
git config user.email "$email"
|
||||||
|
git status
|
||||||
|
git add .
|
||||||
|
git commit -m "Code quality cleanup"
|
||||||
|
git push
|
||||||
|
|
||||||
|
open_pr:
|
||||||
|
name: "Open PR Against main"
|
||||||
|
needs: ["cleanup_changelog", "create_temp_branch", "cleanup"]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
pr_number: ${{ steps.create_pr.outputs.pull-request-number }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: "Checkout ${{ github.repository }}"
|
||||||
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||||
|
with:
|
||||||
|
ref: ${{ needs.create_temp_branch.outputs.temp_branch_name }}
|
||||||
|
token: ${{ secrets.FISHTOWN_BOT_PAT }}
|
||||||
|
|
||||||
|
- name: "Determine PR Title"
|
||||||
|
id: pr_title
|
||||||
|
run: |
|
||||||
|
echo "pr_title=${{ env.PR_TITLE }}" >> $GITHUB_OUTPUT
|
||||||
|
if [${{ env.PR_TITLE }} == ""]; then
|
||||||
|
echo "pr_title='Clean up changelogs and bump to version ${{ needs.cleanup_changelog.outputs.next-version }}'" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: "Determine PR Body"
|
||||||
|
id: pr_body
|
||||||
|
run: |
|
||||||
|
echo "pr_body=${{ env.PR_BODY }}" >> $GITHUB_OUTPUT
|
||||||
|
if [${{ env.PR_BODY }} == ""]; then
|
||||||
|
echo "pr_body='Clean up changelogs and bump to version ${{ needs.cleanup_changelog.outputs.next-version }}'" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: "Add Branch Details"
|
||||||
|
id: pr_body_branch
|
||||||
|
run: |
|
||||||
|
branch_details="The workflow that generated this PR also created a new branch: ${{ inputs.new_branch_name }}"
|
||||||
|
full_body="${{ steps.pr_body.outputs.pr_body }} $branch_details"
|
||||||
|
echo "pr_full_body=$full_body" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: "Open Pull Request"
|
||||||
|
id: create_pr
|
||||||
|
run: |
|
||||||
|
pr_url=$(gh pr create -B main -H ${{ needs.create_temp_branch.outputs.temp_branch_name }} -l "Skip Changelog" -t "${{ steps.pr_title.outputs.pr_title }}" -b "${{ steps.pr_body_branch.outputs.pr_full_body }}")
|
||||||
|
echo "pr_url=$pr_url" >> $GITHUB_OUTPUT
|
||||||
|
env:
|
||||||
|
GH_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
|
||||||
|
|
||||||
|
- name: "[Notification] Pull Request Opened"
|
||||||
|
run: |
|
||||||
|
message="PR opened at ${{ steps.create_pr.outputs.pr_url }}"
|
||||||
|
echo "::notice title="Pull Request Opened": $title::$message"
|
||||||
|
|
||||||
|
cut_new_branch:
|
||||||
|
# don't cut the new branch until we're done opening the PR against main
|
||||||
|
name: "Cut New Branch ${{ inputs.new_branch_name }}"
|
||||||
|
needs: [open_pr]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: "Checkout ${{ github.repository }}"
|
||||||
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||||
|
with:
|
||||||
|
token: ${{ secrets.FISHTOWN_BOT_PAT }}
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: "Ensure New Branch Does Not Exist"
|
||||||
|
id: check_new_branch
|
||||||
|
run: |
|
||||||
|
title="Check New Branch Existence"
|
||||||
|
if git show-ref --quiet ${{ inputs.new_branch_name }}; then
|
||||||
|
message="Branch ${{ inputs.new_branch_name }} already exists. Exiting."
|
||||||
|
echo "::error $title::$message"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: "Create New Release Branch"
|
||||||
|
run: |
|
||||||
|
git checkout -b ${{ inputs.new_branch_name }}
|
||||||
|
|
||||||
|
- name: "Push up New Branch"
|
||||||
|
run: |
|
||||||
|
#Data for commit
|
||||||
|
user="Github Build Bot"
|
||||||
|
email="buildbot@fishtownanalytics.com"
|
||||||
|
git config user.name "$user"
|
||||||
|
git config user.email "$email"
|
||||||
|
git push --set-upstream origin ${{ inputs.new_branch_name }}
|
||||||
|
|
||||||
|
- name: "[Notification] New branch created"
|
||||||
|
run: |
|
||||||
|
message="New branch ${{ inputs.new_branch_name }} created"
|
||||||
|
echo "::notice title="New branch created": $title::$message"
|
||||||
|
|
||||||
|
- name: "Bump dependencies via script"
|
||||||
|
# This bumps the dependency on dbt-core in the adapters
|
||||||
|
if: ${{ !contains(github.repository, 'dbt-core') }}
|
||||||
|
run: |
|
||||||
|
echo ${{ github.repository }}
|
||||||
|
echo "running update_dependencies script"
|
||||||
|
bash ${GITHUB_WORKSPACE}/.github/scripts/update_dependencies.sh ${{ inputs.new_branch_name }}
|
||||||
|
commit_message="bumping .latest branch variable in update_dependencies.sh to ${{ inputs.new_branch_name }}"
|
||||||
|
git status
|
||||||
|
git add .
|
||||||
|
git commit -m "$commit_message"
|
||||||
|
git push
|
||||||
|
|
||||||
|
- name: "Bump env variable via script"
|
||||||
|
# bumps the RELEASE_BRANCH variable in nightly-release.yml in adapters
|
||||||
|
if: ${{ !contains(github.repository, 'dbt-core') }}
|
||||||
|
run: |
|
||||||
|
file="./.github/scripts/update_release_branch.sh"
|
||||||
|
if test -f "$file"; then
|
||||||
|
echo ${{ github.repository }}
|
||||||
|
echo "running some script yet to be written now"
|
||||||
|
bash $file ${{ inputs.new_branch_name }}
|
||||||
|
commit_message="updating env variable to ${{ inputs.new_branch_name }} in nightly-release.yml"
|
||||||
|
git status
|
||||||
|
git add .
|
||||||
|
git commit -m "$commit_message"
|
||||||
|
git push
|
||||||
|
else
|
||||||
|
echo "no $file seen skipping step"
|
||||||
|
fi
|
||||||
|
|||||||
4
.github/workflows/docs-issue.yml
vendored
4
.github/workflows/docs-issue.yml
vendored
@@ -36,6 +36,6 @@ jobs:
|
|||||||
uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
|
uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
|
||||||
with:
|
with:
|
||||||
issue_repository: "dbt-labs/docs.getdbt.com"
|
issue_repository: "dbt-labs/docs.getdbt.com"
|
||||||
issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} Issue #${{ github.event.issue.number }}"
|
issue_title: "[Core] Docs Changes Needed from ${{ github.event.repository.name }} Issue #${{ github.event.issue.number }}"
|
||||||
issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated."
|
issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated.\n Originating from this issue: https://github.com/dbt-labs/dbt-core/issues/${{ github.event.issue.number }}"
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
|
|||||||
218
.github/workflows/main.yml
vendored
218
.github/workflows/main.yml
vendored
@@ -20,6 +20,8 @@ on:
|
|||||||
- "*.latest"
|
- "*.latest"
|
||||||
- "releases/*"
|
- "releases/*"
|
||||||
pull_request:
|
pull_request:
|
||||||
|
merge_group:
|
||||||
|
types: [checks_requested]
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
permissions: read-all
|
permissions: read-all
|
||||||
@@ -47,26 +49,33 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check out the repository
|
- name: Check out the repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||||
|
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: '3.9'
|
python-version: "3.10"
|
||||||
|
|
||||||
- name: Install python dependencies
|
- name: Install python dependencies
|
||||||
run: |
|
run: |
|
||||||
python -m pip install --user --upgrade pip
|
python -m pip install --user --upgrade pip
|
||||||
python -m pip --version
|
python -m pip --version
|
||||||
make dev
|
python -m pip install hatch
|
||||||
mypy --version
|
cd core
|
||||||
dbt --version
|
hatch run setup
|
||||||
|
|
||||||
|
- name: Verify dbt installation
|
||||||
|
run: |
|
||||||
|
cd core
|
||||||
|
hatch run dbt --version
|
||||||
|
|
||||||
- name: Run pre-commit hooks
|
- name: Run pre-commit hooks
|
||||||
run: pre-commit run --all-files --show-diff-on-failure
|
run: |
|
||||||
|
cd core
|
||||||
|
hatch run code-quality
|
||||||
|
|
||||||
unit:
|
unit:
|
||||||
name: unit test / python ${{ matrix.python-version }}
|
name: "unit test / python ${{ matrix.python-version }}"
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 10
|
timeout-minutes: 10
|
||||||
@@ -74,17 +83,14 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
python-version: [ "3.9", "3.10", "3.11", "3.12" ]
|
python-version: ["3.10", "3.11", "3.12", "3.13"]
|
||||||
|
|
||||||
env:
|
|
||||||
TOXENV: "unit"
|
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check out the repository
|
- name: Check out the repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||||
|
|
||||||
- name: Set up Python ${{ matrix.python-version }}
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
|
|
||||||
@@ -92,15 +98,15 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
python -m pip install --user --upgrade pip
|
python -m pip install --user --upgrade pip
|
||||||
python -m pip --version
|
python -m pip --version
|
||||||
python -m pip install tox
|
python -m pip install hatch
|
||||||
tox --version
|
hatch --version
|
||||||
|
|
||||||
- name: Run unit tests
|
- name: Run unit tests
|
||||||
uses: nick-fields/retry@v3
|
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
|
||||||
with:
|
with:
|
||||||
timeout_minutes: 10
|
timeout_minutes: 10
|
||||||
max_attempts: 3
|
max_attempts: 3
|
||||||
command: tox -e unit
|
command: cd core && hatch run ci:unit-tests
|
||||||
|
|
||||||
- name: Get current date
|
- name: Get current date
|
||||||
if: always()
|
if: always()
|
||||||
@@ -111,10 +117,11 @@ jobs:
|
|||||||
|
|
||||||
- name: Upload Unit Test Coverage to Codecov
|
- name: Upload Unit Test Coverage to Codecov
|
||||||
if: ${{ matrix.python-version == '3.11' }}
|
if: ${{ matrix.python-version == '3.11' }}
|
||||||
uses: codecov/codecov-action@v4
|
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # codecov/codecov-action@v5
|
||||||
with:
|
with:
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
flags: unit
|
flags: unit
|
||||||
|
fail_ci_if_error: false
|
||||||
|
|
||||||
integration-metadata:
|
integration-metadata:
|
||||||
name: integration test metadata generation
|
name: integration test metadata generation
|
||||||
@@ -139,7 +146,7 @@ jobs:
|
|||||||
- name: generate include
|
- name: generate include
|
||||||
id: generate-include
|
id: generate-include
|
||||||
run: |
|
run: |
|
||||||
INCLUDE=('"python-version":"3.9","os":"windows-latest"' '"python-version":"3.9","os":"macos-14"' )
|
INCLUDE=('"python-version":"3.10","os":"windows-latest"' '"python-version":"3.10","os":"macos-14"' )
|
||||||
INCLUDE_GROUPS="["
|
INCLUDE_GROUPS="["
|
||||||
for include in ${INCLUDE[@]}; do
|
for include in ${INCLUDE[@]}; do
|
||||||
for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
|
for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
|
||||||
@@ -151,7 +158,102 @@ jobs:
|
|||||||
echo "include=${INCLUDE_GROUPS}"
|
echo "include=${INCLUDE_GROUPS}"
|
||||||
echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT
|
echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
integration:
|
integration-postgres:
|
||||||
|
name: "(${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}"
|
||||||
|
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
timeout-minutes: 30
|
||||||
|
needs:
|
||||||
|
- integration-metadata
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
python-version: ["3.10", "3.11", "3.12", "3.13"]
|
||||||
|
os: ["ubuntu-latest"]
|
||||||
|
split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
|
||||||
|
env:
|
||||||
|
DBT_INVOCATION_ENV: github-actions
|
||||||
|
DBT_TEST_USER_1: dbt_test_user_1
|
||||||
|
DBT_TEST_USER_2: dbt_test_user_2
|
||||||
|
DBT_TEST_USER_3: dbt_test_user_3
|
||||||
|
DD_CIVISIBILITY_AGENTLESS_ENABLED: true
|
||||||
|
DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
|
||||||
|
DD_SITE: datadoghq.com
|
||||||
|
DD_ENV: ci
|
||||||
|
DD_SERVICE: ${{ github.event.repository.name }}
|
||||||
|
|
||||||
|
services:
|
||||||
|
# Label used to access the service container
|
||||||
|
postgres:
|
||||||
|
# Docker Hub image
|
||||||
|
image: postgres
|
||||||
|
# Provide the password for postgres
|
||||||
|
env:
|
||||||
|
POSTGRES_PASSWORD: password
|
||||||
|
POSTGRES_USER: postgres
|
||||||
|
# Set health checks to wait until postgres has started
|
||||||
|
options: >-
|
||||||
|
--health-cmd pg_isready
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
ports:
|
||||||
|
- 5432:5432
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Check out the repository
|
||||||
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
|
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
|
||||||
|
with:
|
||||||
|
python-version: ${{ matrix.python-version }}
|
||||||
|
|
||||||
|
- name: Run postgres setup script
|
||||||
|
run: |
|
||||||
|
./scripts/setup_db.sh
|
||||||
|
env:
|
||||||
|
PGHOST: localhost
|
||||||
|
PGPORT: 5432
|
||||||
|
PGPASSWORD: password
|
||||||
|
|
||||||
|
- name: Install python tools
|
||||||
|
run: |
|
||||||
|
python -m pip install --user --upgrade pip
|
||||||
|
python -m pip --version
|
||||||
|
python -m pip install hatch
|
||||||
|
hatch --version
|
||||||
|
|
||||||
|
- name: Run integration tests
|
||||||
|
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
|
||||||
|
with:
|
||||||
|
timeout_minutes: 30
|
||||||
|
max_attempts: 3
|
||||||
|
shell: bash
|
||||||
|
command: cd core && hatch run ci:integration-tests -- --ddtrace --splits ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }} --group ${{ matrix.split-group }}
|
||||||
|
|
||||||
|
- name: Get current date
|
||||||
|
if: always()
|
||||||
|
id: date
|
||||||
|
run: |
|
||||||
|
CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
|
||||||
|
echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # actions/upload-artifact@v4
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.split-group }}_${{ steps.date.outputs.date }}
|
||||||
|
path: ./logs
|
||||||
|
|
||||||
|
- name: Upload Integration Test Coverage to Codecov
|
||||||
|
if: ${{ matrix.python-version == '3.11' }}
|
||||||
|
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # codecov/codecov-action@v5
|
||||||
|
with:
|
||||||
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
|
flags: integration
|
||||||
|
fail_ci_if_error: false
|
||||||
|
|
||||||
|
integration-mac-windows:
|
||||||
name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
|
name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
|
||||||
|
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
@@ -161,12 +263,9 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
python-version: [ "3.9", "3.10", "3.11", "3.12" ]
|
# already includes split group and runs mac + windows
|
||||||
os: [ubuntu-20.04]
|
|
||||||
split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
|
|
||||||
include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
|
include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
|
||||||
env:
|
env:
|
||||||
TOXENV: integration
|
|
||||||
DBT_INVOCATION_ENV: github-actions
|
DBT_INVOCATION_ENV: github-actions
|
||||||
DBT_TEST_USER_1: dbt_test_user_1
|
DBT_TEST_USER_1: dbt_test_user_1
|
||||||
DBT_TEST_USER_2: dbt_test_user_2
|
DBT_TEST_USER_2: dbt_test_user_2
|
||||||
@@ -179,20 +278,21 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check out the repository
|
- name: Check out the repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||||
|
|
||||||
- name: Set up Python ${{ matrix.python-version }}
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
|
|
||||||
- name: Set up postgres (linux)
|
|
||||||
if: runner.os == 'Linux'
|
|
||||||
uses: ./.github/actions/setup-postgres-linux
|
|
||||||
|
|
||||||
- name: Set up postgres (macos)
|
- name: Set up postgres (macos)
|
||||||
if: runner.os == 'macOS'
|
if: runner.os == 'macOS'
|
||||||
uses: ./.github/actions/setup-postgres-macos
|
|
||||||
|
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
|
||||||
|
with:
|
||||||
|
timeout_minutes: 10
|
||||||
|
max_attempts: 3
|
||||||
|
command: ./scripts/setup_db.sh
|
||||||
|
|
||||||
- name: Set up postgres (windows)
|
- name: Set up postgres (windows)
|
||||||
if: runner.os == 'Windows'
|
if: runner.os == 'Windows'
|
||||||
@@ -202,17 +302,16 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
python -m pip install --user --upgrade pip
|
python -m pip install --user --upgrade pip
|
||||||
python -m pip --version
|
python -m pip --version
|
||||||
python -m pip install tox
|
python -m pip install hatch
|
||||||
tox --version
|
hatch --version
|
||||||
|
|
||||||
- name: Run integration tests
|
- name: Run integration tests
|
||||||
uses: nick-fields/retry@v3
|
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
|
||||||
with:
|
with:
|
||||||
timeout_minutes: 30
|
timeout_minutes: 30
|
||||||
max_attempts: 3
|
max_attempts: 3
|
||||||
command: tox -- --ddtrace
|
shell: bash
|
||||||
env:
|
command: cd core && hatch run ci:integration-tests -- --ddtrace --splits ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }} --group ${{ matrix.split-group }}
|
||||||
PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
|
|
||||||
|
|
||||||
- name: Get current date
|
- name: Get current date
|
||||||
if: always()
|
if: always()
|
||||||
@@ -221,7 +320,7 @@ jobs:
|
|||||||
CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
|
CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
|
||||||
echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
|
echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v4
|
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # actions/upload-artifact@v4
|
||||||
if: always()
|
if: always()
|
||||||
with:
|
with:
|
||||||
name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.split-group }}_${{ steps.date.outputs.date }}
|
name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.split-group }}_${{ steps.date.outputs.date }}
|
||||||
@@ -229,19 +328,20 @@ jobs:
|
|||||||
|
|
||||||
- name: Upload Integration Test Coverage to Codecov
|
- name: Upload Integration Test Coverage to Codecov
|
||||||
if: ${{ matrix.python-version == '3.11' }}
|
if: ${{ matrix.python-version == '3.11' }}
|
||||||
uses: codecov/codecov-action@v4
|
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # codecov/codecov-action@v5
|
||||||
with:
|
with:
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
flags: integration
|
flags: integration
|
||||||
|
fail_ci_if_error: false
|
||||||
|
|
||||||
integration-report:
|
integration-report:
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
name: Integration Test Suite
|
name: Integration Test Suite
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: integration
|
needs: [integration-mac-windows, integration-postgres]
|
||||||
steps:
|
steps:
|
||||||
- name: "Integration Tests Failed"
|
- name: "Integration Tests Failed"
|
||||||
if: ${{ contains(needs.integration.result, 'failure') || contains(needs.integration.result, 'cancelled') }}
|
if: ${{ contains(needs.integration-mac-windows.result, 'failure') || contains(needs.integration-mac-windows.result, 'cancelled') || contains(needs.integration-postgres.result, 'failure') || contains(needs.integration-postgres.result, 'cancelled') }}
|
||||||
# when this is true the next step won't execute
|
# when this is true the next step won't execute
|
||||||
run: |
|
run: |
|
||||||
echo "::notice title='Integration test suite failed'"
|
echo "::notice title='Integration test suite failed'"
|
||||||
@@ -258,17 +358,17 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check out the repository
|
- name: Check out the repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||||
|
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: '3.9'
|
python-version: "3.10"
|
||||||
|
|
||||||
- name: Install python dependencies
|
- name: Install python dependencies
|
||||||
run: |
|
run: |
|
||||||
python -m pip install --user --upgrade pip
|
python -m pip install --user --upgrade pip
|
||||||
python -m pip install --upgrade setuptools wheel twine check-wheel-contents
|
python -m pip install --upgrade hatch twine check-wheel-contents
|
||||||
python -m pip --version
|
python -m pip --version
|
||||||
|
|
||||||
- name: Build distributions
|
- name: Build distributions
|
||||||
@@ -277,27 +377,7 @@ jobs:
|
|||||||
- name: Show distributions
|
- name: Show distributions
|
||||||
run: ls -lh dist/
|
run: ls -lh dist/
|
||||||
|
|
||||||
- name: Check distribution descriptions
|
- name: Check and verify distributions
|
||||||
run: |
|
run: |
|
||||||
twine check dist/*
|
cd core
|
||||||
|
hatch run build:check-all
|
||||||
- name: Check wheel contents
|
|
||||||
run: |
|
|
||||||
check-wheel-contents dist/*.whl --ignore W007,W008
|
|
||||||
|
|
||||||
- name: Install wheel distributions
|
|
||||||
run: |
|
|
||||||
find ./dist/*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
|
|
||||||
|
|
||||||
- name: Check wheel distributions
|
|
||||||
run: |
|
|
||||||
dbt --version
|
|
||||||
|
|
||||||
- name: Install source distributions
|
|
||||||
# ignore dbt-1.0.0, which intentionally raises an error when installed from source
|
|
||||||
run: |
|
|
||||||
find ./dist/*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
|
|
||||||
|
|
||||||
- name: Check source distributions
|
|
||||||
run: |
|
|
||||||
dbt --version
|
|
||||||
|
|||||||
265
.github/workflows/model_performance.yml
vendored
265
.github/workflows/model_performance.yml
vendored
@@ -1,265 +0,0 @@
|
|||||||
# **what?**
|
|
||||||
# This workflow models the performance characteristics of a point in time in dbt.
|
|
||||||
# It runs specific dbt commands on committed projects multiple times to create and
|
|
||||||
# commit information about the distribution to the current branch. For more information
|
|
||||||
# see the readme in the performance module at /performance/README.md.
|
|
||||||
#
|
|
||||||
# **why?**
|
|
||||||
# When developing new features, we can take quick performance samples and compare
|
|
||||||
# them against the commited baseline measurements produced by this workflow to detect
|
|
||||||
# some performance regressions at development time before they reach users.
|
|
||||||
#
|
|
||||||
# **when?**
|
|
||||||
# This is only run once directly after each release (for non-prereleases). If for some
|
|
||||||
# reason the results of a run are not satisfactory, it can also be triggered manually.
|
|
||||||
|
|
||||||
name: Model Performance Characteristics
|
|
||||||
|
|
||||||
on:
|
|
||||||
# runs after non-prereleases are published.
|
|
||||||
release:
|
|
||||||
types: [released]
|
|
||||||
# run manually from the actions tab
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
release_id:
|
|
||||||
description: 'dbt version to model (must be non-prerelease in Pypi)'
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
|
|
||||||
env:
|
|
||||||
RUNNER_CACHE_PATH: performance/runner/target/release/runner
|
|
||||||
|
|
||||||
# both jobs need to write
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
pull-requests: write
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
set-variables:
|
|
||||||
name: Setting Variables
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
cache_key: ${{ steps.variables.outputs.cache_key }}
|
|
||||||
release_id: ${{ steps.semver.outputs.base-version }}
|
|
||||||
release_branch: ${{ steps.variables.outputs.release_branch }}
|
|
||||||
steps:
|
|
||||||
|
|
||||||
# explicitly checkout the performance runner from main regardless of which
|
|
||||||
# version we are modeling.
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
ref: main
|
|
||||||
|
|
||||||
- name: Parse version into parts
|
|
||||||
id: semver
|
|
||||||
uses: dbt-labs/actions/parse-semver@v1
|
|
||||||
with:
|
|
||||||
version: ${{ github.event.inputs.release_id || github.event.release.tag_name }}
|
|
||||||
|
|
||||||
# collect all the variables that need to be used in subsequent jobs
|
|
||||||
- name: Set variables
|
|
||||||
id: variables
|
|
||||||
run: |
|
|
||||||
# create a cache key that will be used in the next job. without this the
|
|
||||||
# next job would have to checkout from main and hash the files itself.
|
|
||||||
echo "cache_key=${{ runner.os }}-${{ hashFiles('performance/runner/Cargo.toml')}}-${{ hashFiles('performance/runner/src/*') }}" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
branch_name="${{steps.semver.outputs.major}}.${{steps.semver.outputs.minor}}.latest"
|
|
||||||
echo "release_branch=$branch_name" >> $GITHUB_OUTPUT
|
|
||||||
echo "release branch is inferred to be ${branch_name}"
|
|
||||||
|
|
||||||
latest-runner:
|
|
||||||
name: Build or Fetch Runner
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: [set-variables]
|
|
||||||
env:
|
|
||||||
RUSTFLAGS: "-D warnings"
|
|
||||||
steps:
|
|
||||||
- name: '[DEBUG] print variables'
|
|
||||||
run: |
|
|
||||||
echo "all variables defined in set-variables"
|
|
||||||
echo "cache_key: ${{ needs.set-variables.outputs.cache_key }}"
|
|
||||||
echo "release_id: ${{ needs.set-variables.outputs.release_id }}"
|
|
||||||
echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}"
|
|
||||||
|
|
||||||
# explicitly checkout the performance runner from main regardless of which
|
|
||||||
# version we are modeling.
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
ref: main
|
|
||||||
|
|
||||||
# attempts to access a previously cached runner
|
|
||||||
- uses: actions/cache@v4
|
|
||||||
id: cache
|
|
||||||
with:
|
|
||||||
path: ${{ env.RUNNER_CACHE_PATH }}
|
|
||||||
key: ${{ needs.set-variables.outputs.cache_key }}
|
|
||||||
|
|
||||||
- name: Fetch Rust Toolchain
|
|
||||||
if: steps.cache.outputs.cache-hit != 'true'
|
|
||||||
uses: actions-rs/toolchain@v1
|
|
||||||
with:
|
|
||||||
profile: minimal
|
|
||||||
toolchain: stable
|
|
||||||
override: true
|
|
||||||
|
|
||||||
- name: Add fmt
|
|
||||||
if: steps.cache.outputs.cache-hit != 'true'
|
|
||||||
run: rustup component add rustfmt
|
|
||||||
|
|
||||||
- name: Cargo fmt
|
|
||||||
if: steps.cache.outputs.cache-hit != 'true'
|
|
||||||
uses: actions-rs/cargo@v1
|
|
||||||
with:
|
|
||||||
command: fmt
|
|
||||||
args: --manifest-path performance/runner/Cargo.toml --all -- --check
|
|
||||||
|
|
||||||
- name: Test
|
|
||||||
if: steps.cache.outputs.cache-hit != 'true'
|
|
||||||
uses: actions-rs/cargo@v1
|
|
||||||
with:
|
|
||||||
command: test
|
|
||||||
args: --manifest-path performance/runner/Cargo.toml
|
|
||||||
|
|
||||||
- name: Build (optimized)
|
|
||||||
if: steps.cache.outputs.cache-hit != 'true'
|
|
||||||
uses: actions-rs/cargo@v1
|
|
||||||
with:
|
|
||||||
command: build
|
|
||||||
args: --release --manifest-path performance/runner/Cargo.toml
|
|
||||||
# the cache action automatically caches this binary at the end of the job
|
|
||||||
|
|
||||||
model:
|
|
||||||
# depends on `latest-runner` as a separate job so that failures in this job do not prevent
|
|
||||||
# a successfully tested and built binary from being cached.
|
|
||||||
needs: [set-variables, latest-runner]
|
|
||||||
name: Model a release
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
|
|
||||||
- name: '[DEBUG] print variables'
|
|
||||||
run: |
|
|
||||||
echo "all variables defined in set-variables"
|
|
||||||
echo "cache_key: ${{ needs.set-variables.outputs.cache_key }}"
|
|
||||||
echo "release_id: ${{ needs.set-variables.outputs.release_id }}"
|
|
||||||
echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}"
|
|
||||||
|
|
||||||
- name: Setup Python
|
|
||||||
uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: "3.9"
|
|
||||||
|
|
||||||
- name: Install dbt
|
|
||||||
run: pip install dbt-postgres==${{ needs.set-variables.outputs.release_id }}
|
|
||||||
|
|
||||||
- name: Install Hyperfine
|
|
||||||
run: wget https://github.com/sharkdp/hyperfine/releases/download/v1.11.0/hyperfine_1.11.0_amd64.deb && sudo dpkg -i hyperfine_1.11.0_amd64.deb
|
|
||||||
|
|
||||||
# explicitly checkout main to get the latest project definitions
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
ref: main
|
|
||||||
|
|
||||||
# this was built in the previous job so it will be there.
|
|
||||||
- name: Fetch Runner
|
|
||||||
uses: actions/cache@v4
|
|
||||||
id: cache
|
|
||||||
with:
|
|
||||||
path: ${{ env.RUNNER_CACHE_PATH }}
|
|
||||||
key: ${{ needs.set-variables.outputs.cache_key }}
|
|
||||||
|
|
||||||
- name: Move Runner
|
|
||||||
run: mv performance/runner/target/release/runner performance/app
|
|
||||||
|
|
||||||
- name: Change Runner Permissions
|
|
||||||
run: chmod +x ./performance/app
|
|
||||||
|
|
||||||
- name: '[DEBUG] ls baseline directory before run'
|
|
||||||
run: ls -R performance/baselines/
|
|
||||||
|
|
||||||
# `${{ github.workspace }}` is used to pass the absolute path
|
|
||||||
- name: Create directories
|
|
||||||
run: |
|
|
||||||
mkdir ${{ github.workspace }}/performance/tmp/
|
|
||||||
mkdir -p performance/baselines/${{ needs.set-variables.outputs.release_id }}/
|
|
||||||
|
|
||||||
# Run modeling with taking 20 samples
|
|
||||||
- name: Run Measurement
|
|
||||||
run: |
|
|
||||||
performance/app model -v ${{ needs.set-variables.outputs.release_id }} -b ${{ github.workspace }}/performance/baselines/ -p ${{ github.workspace }}/performance/projects/ -t ${{ github.workspace }}/performance/tmp/ -n 20
|
|
||||||
|
|
||||||
- name: '[DEBUG] ls baseline directory after run'
|
|
||||||
run: ls -R performance/baselines/
|
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: baseline
|
|
||||||
path: performance/baselines/${{ needs.set-variables.outputs.release_id }}/
|
|
||||||
|
|
||||||
create-pr:
|
|
||||||
name: Open PR for ${{ matrix.base-branch }}
|
|
||||||
|
|
||||||
# depends on `model` as a separate job so that the baseline can be committed to more than one branch
|
|
||||||
# i.e. release branch and main
|
|
||||||
needs: [set-variables, latest-runner, model]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
include:
|
|
||||||
- base-branch: refs/heads/main
|
|
||||||
target-branch: performance-bot/main_${{ needs.set-variables.outputs.release_id }}_${{GITHUB.RUN_ID}}
|
|
||||||
- base-branch: refs/heads/${{ needs.set-variables.outputs.release_branch }}
|
|
||||||
target-branch: performance-bot/release_${{ needs.set-variables.outputs.release_id }}_${{GITHUB.RUN_ID}}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: '[DEBUG] print variables'
|
|
||||||
run: |
|
|
||||||
echo "all variables defined in set-variables"
|
|
||||||
echo "cache_key: ${{ needs.set-variables.outputs.cache_key }}"
|
|
||||||
echo "release_id: ${{ needs.set-variables.outputs.release_id }}"
|
|
||||||
echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}"
|
|
||||||
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
ref: ${{ matrix.base-branch }}
|
|
||||||
|
|
||||||
- name: Create PR branch
|
|
||||||
run: |
|
|
||||||
git checkout -b ${{ matrix.target-branch }}
|
|
||||||
git push origin ${{ matrix.target-branch }}
|
|
||||||
git branch --set-upstream-to=origin/${{ matrix.target-branch }} ${{ matrix.target-branch }}
|
|
||||||
|
|
||||||
- uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: baseline
|
|
||||||
path: performance/baselines/${{ needs.set-variables.outputs.release_id }}
|
|
||||||
|
|
||||||
- name: '[DEBUG] ls baselines after artifact download'
|
|
||||||
run: ls -R performance/baselines/
|
|
||||||
|
|
||||||
- name: Commit baseline
|
|
||||||
uses: EndBug/add-and-commit@v9
|
|
||||||
with:
|
|
||||||
add: 'performance/baselines/*'
|
|
||||||
author_name: 'Github Build Bot'
|
|
||||||
author_email: 'buildbot@fishtownanalytics.com'
|
|
||||||
message: 'adding performance baseline for ${{ needs.set-variables.outputs.release_id }}'
|
|
||||||
push: 'origin origin/${{ matrix.target-branch }}'
|
|
||||||
|
|
||||||
- name: Create Pull Request
|
|
||||||
uses: peter-evans/create-pull-request@v6
|
|
||||||
with:
|
|
||||||
author: 'Github Build Bot <buildbot@fishtownanalytics.com>'
|
|
||||||
base: ${{ matrix.base-branch }}
|
|
||||||
branch: '${{ matrix.target-branch }}'
|
|
||||||
title: 'Adding performance modeling for ${{needs.set-variables.outputs.release_id}} to ${{ matrix.base-branch }}'
|
|
||||||
body: 'Committing perf results for tracking for the ${{needs.set-variables.outputs.release_id}}'
|
|
||||||
labels: |
|
|
||||||
Skip Changelog
|
|
||||||
Performance
|
|
||||||
8
.github/workflows/nightly-release.yml
vendored
8
.github/workflows/nightly-release.yml
vendored
@@ -31,7 +31,7 @@ env:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
aggregate-release-data:
|
aggregate-release-data:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ${{ vars.UBUNTU_LATEST }}
|
||||||
|
|
||||||
outputs:
|
outputs:
|
||||||
version_number: ${{ steps.nightly-release-version.outputs.number }}
|
version_number: ${{ steps.nightly-release-version.outputs.number }}
|
||||||
@@ -39,14 +39,14 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: "Checkout ${{ github.repository }} Branch ${{ env.RELEASE_BRANCH }}"
|
- name: "Checkout ${{ github.repository }} Branch ${{ env.RELEASE_BRANCH }}"
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ env.RELEASE_BRANCH }}
|
ref: ${{ env.RELEASE_BRANCH }}
|
||||||
|
|
||||||
- name: "Get Current Version Number"
|
- name: "Get Current Version Number"
|
||||||
id: version-number-sources
|
id: version-number-sources
|
||||||
run: |
|
run: |
|
||||||
current_version=`awk -F"current_version = " '{print $2}' .bumpversion.cfg | tr '\n' ' '`
|
current_version=$(grep '^version = ' core/dbt/__version__.py | sed 's/version = "\(.*\)"/\1/')
|
||||||
echo "current_version=$current_version" >> $GITHUB_OUTPUT
|
echo "current_version=$current_version" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
- name: "Audit Version And Parse Into Parts"
|
- name: "Audit Version And Parse Into Parts"
|
||||||
@@ -76,7 +76,7 @@ jobs:
|
|||||||
echo "name=${{ env.RELEASE_BRANCH }}" >> $GITHUB_OUTPUT
|
echo "name=${{ env.RELEASE_BRANCH }}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
log-outputs-aggregate-release-data:
|
log-outputs-aggregate-release-data:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ${{ vars.UBUNTU_LATEST }}
|
||||||
needs: [aggregate-release-data]
|
needs: [aggregate-release-data]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
|
|||||||
76
.github/workflows/release.yml
vendored
76
.github/workflows/release.yml
vendored
@@ -72,12 +72,15 @@ defaults:
|
|||||||
run:
|
run:
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|
||||||
|
env:
|
||||||
|
MIN_HATCH_VERSION: "1.11.0"
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
job-setup:
|
job-setup:
|
||||||
name: Log Inputs
|
name: Log Inputs
|
||||||
runs-on: ubuntu-latest
|
runs-on: ${{ vars.UBUNTU_LATEST }}
|
||||||
outputs:
|
outputs:
|
||||||
starting_sha: ${{ steps.set_sha.outputs.starting_sha }}
|
use_hatch: ${{ steps.use_hatch.outputs.use_hatch }}
|
||||||
steps:
|
steps:
|
||||||
- name: "[DEBUG] Print Variables"
|
- name: "[DEBUG] Print Variables"
|
||||||
run: |
|
run: |
|
||||||
@@ -88,19 +91,29 @@ jobs:
|
|||||||
echo Nightly release: ${{ inputs.nightly_release }}
|
echo Nightly release: ${{ inputs.nightly_release }}
|
||||||
echo Only Docker: ${{ inputs.only_docker }}
|
echo Only Docker: ${{ inputs.only_docker }}
|
||||||
|
|
||||||
- name: "Checkout target branch"
|
# In version env.HATCH_VERSION we started to use hatch for build tooling. Before that we used setuptools.
|
||||||
uses: actions/checkout@v4
|
# This needs to check if we're using hatch or setuptools based on the version being released. We should
|
||||||
with:
|
# check if the version is greater than or equal to env.HATCH_VERSION. If it is, we use hatch, otherwise we use setuptools.
|
||||||
ref: ${{ inputs.target_branch }}
|
- name: "Check if using hatch"
|
||||||
|
id: use_hatch
|
||||||
# release-prep.yml really shouldn't take in the sha but since core + all adapters
|
|
||||||
# depend on it now this workaround lets us not input it manually with risk of error.
|
|
||||||
# The changes always get merged into the head so we can't use a specific commit for
|
|
||||||
# releases anyways.
|
|
||||||
- name: "Capture sha"
|
|
||||||
id: set_sha
|
|
||||||
run: |
|
run: |
|
||||||
echo "starting_sha=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
|
# Extract major.minor from versions like 1.11.0a1 -> 1.11
|
||||||
|
INPUT_MAJ_MIN=$(echo "${{ inputs.version_number }}" | sed -E 's/^([0-9]+\.[0-9]+).*/\1/')
|
||||||
|
HATCH_MAJ_MIN=$(echo "${{ env.MIN_HATCH_VERSION }}" | sed -E 's/^([0-9]+\.[0-9]+).*/\1/')
|
||||||
|
|
||||||
|
if [ $(echo "$INPUT_MAJ_MIN >= $HATCH_MAJ_MIN" | bc) -eq 1 ]; then
|
||||||
|
echo "use_hatch=true" >> $GITHUB_OUTPUT
|
||||||
|
else
|
||||||
|
echo "use_hatch=false" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: "Notify if using hatch"
|
||||||
|
run: |
|
||||||
|
if [ ${{ steps.use_hatch.outputs.use_hatch }} = "true" ]; then
|
||||||
|
echo "::notice title="Using Hatch": $title::Using Hatch for release"
|
||||||
|
else
|
||||||
|
echo "::notice title="Using Setuptools": $title::Using Setuptools for release"
|
||||||
|
fi
|
||||||
|
|
||||||
bump-version-generate-changelog:
|
bump-version-generate-changelog:
|
||||||
name: Bump package version, Generate changelog
|
name: Bump package version, Generate changelog
|
||||||
@@ -110,12 +123,13 @@ jobs:
|
|||||||
uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@main
|
uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@main
|
||||||
|
|
||||||
with:
|
with:
|
||||||
sha: ${{ needs.job-setup.outputs.starting_sha }}
|
|
||||||
version_number: ${{ inputs.version_number }}
|
version_number: ${{ inputs.version_number }}
|
||||||
|
hatch_directory: "core"
|
||||||
target_branch: ${{ inputs.target_branch }}
|
target_branch: ${{ inputs.target_branch }}
|
||||||
env_setup_script_path: "scripts/env-setup.sh"
|
env_setup_script_path: "scripts/env-setup.sh"
|
||||||
test_run: ${{ inputs.test_run }}
|
test_run: ${{ inputs.test_run }}
|
||||||
nightly_release: ${{ inputs.nightly_release }}
|
nightly_release: ${{ inputs.nightly_release }}
|
||||||
|
use_hatch: ${{ needs.job-setup.outputs.use_hatch == 'true' }} # workflow outputs are strings...
|
||||||
|
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
|
|
||||||
@@ -125,7 +139,7 @@ jobs:
|
|||||||
|
|
||||||
needs: [bump-version-generate-changelog]
|
needs: [bump-version-generate-changelog]
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ${{ vars.UBUNTU_LATEST }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Print variables
|
- name: Print variables
|
||||||
@@ -143,16 +157,13 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
|
sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
|
||||||
version_number: ${{ inputs.version_number }}
|
version_number: ${{ inputs.version_number }}
|
||||||
|
hatch_directory: "core"
|
||||||
changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
|
changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
|
||||||
build_script_path: "scripts/build-dist.sh"
|
build_script_path: "scripts/build-dist.sh"
|
||||||
s3_bucket_name: "core-team-artifacts"
|
|
||||||
package_test_command: "dbt --version"
|
package_test_command: "dbt --version"
|
||||||
test_run: ${{ inputs.test_run }}
|
test_run: ${{ inputs.test_run }}
|
||||||
nightly_release: ${{ inputs.nightly_release }}
|
nightly_release: ${{ inputs.nightly_release }}
|
||||||
|
use_hatch: ${{ needs.job-setup.outputs.use_hatch == 'true' }} # workflow outputs are strings...
|
||||||
secrets:
|
|
||||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
|
||||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
|
||||||
|
|
||||||
github-release:
|
github-release:
|
||||||
name: GitHub Release
|
name: GitHub Release
|
||||||
@@ -188,7 +199,7 @@ jobs:
|
|||||||
# determine if we need to release dbt-core or both dbt-core and dbt-postgres
|
# determine if we need to release dbt-core or both dbt-core and dbt-postgres
|
||||||
name: Determine Docker Package
|
name: Determine Docker Package
|
||||||
if: ${{ !failure() && !cancelled() }}
|
if: ${{ !failure() && !cancelled() }}
|
||||||
runs-on: ubuntu-latest
|
runs-on: ${{ vars.UBUNTU_LATEST }}
|
||||||
needs: [pypi-release]
|
needs: [pypi-release]
|
||||||
outputs:
|
outputs:
|
||||||
matrix: ${{ steps.determine-docker-package.outputs.matrix }}
|
matrix: ${{ steps.determine-docker-package.outputs.matrix }}
|
||||||
@@ -247,3 +258,24 @@ jobs:
|
|||||||
|
|
||||||
secrets:
|
secrets:
|
||||||
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}
|
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}
|
||||||
|
|
||||||
|
testing-slack-notification:
|
||||||
|
# sends notifications to #slackbot-test
|
||||||
|
name: Testing - Slack Notification
|
||||||
|
if: ${{ failure() && inputs.test_run && !inputs.nightly_release }}
|
||||||
|
|
||||||
|
needs:
|
||||||
|
[
|
||||||
|
bump-version-generate-changelog,
|
||||||
|
build-test-package,
|
||||||
|
github-release,
|
||||||
|
pypi-release,
|
||||||
|
docker-release,
|
||||||
|
]
|
||||||
|
|
||||||
|
uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main
|
||||||
|
with:
|
||||||
|
status: "failure"
|
||||||
|
|
||||||
|
secrets:
|
||||||
|
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_TESTING_WEBHOOK_URL }}
|
||||||
|
|||||||
49
.github/workflows/schema-check.yml
vendored
49
.github/workflows/schema-check.yml
vendored
@@ -9,15 +9,21 @@
|
|||||||
# occur so we want to proactively alert to it.
|
# occur so we want to proactively alert to it.
|
||||||
#
|
#
|
||||||
# **when?**
|
# **when?**
|
||||||
# On pushes to `develop` and release branches. Manual runs are also enabled.
|
# Only can be run manually
|
||||||
name: Artifact Schema Check
|
name: Artifact Schema Check
|
||||||
|
|
||||||
on:
|
on:
|
||||||
pull_request:
|
# pull_request:
|
||||||
types: [ opened, reopened, labeled, unlabeled, synchronize ]
|
# types: [ opened, reopened, labeled, unlabeled, synchronize ]
|
||||||
paths-ignore: [ '.changes/**', '.github/**', 'tests/**', '**.md', '**.yml' ]
|
# paths-ignore: [ '.changes/**', '.github/**', 'tests/**', '**.md', '**.yml' ]
|
||||||
|
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
target_branch:
|
||||||
|
description: "The branch to check against"
|
||||||
|
type: string
|
||||||
|
default: "main"
|
||||||
|
required: true
|
||||||
|
|
||||||
# no special access is needed
|
# no special access is needed
|
||||||
permissions: read-all
|
permissions: read-all
|
||||||
@@ -30,23 +36,24 @@ env:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
checking-schemas:
|
checking-schemas:
|
||||||
name: "Checking schemas"
|
name: "Post-merge schema changes required"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ${{ vars.UBUNTU_LATEST }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: 3.9
|
python-version: "3.10"
|
||||||
|
|
||||||
- name: Checkout dbt repo
|
- name: Checkout dbt repo
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
path: ${{ env.DBT_REPO_DIRECTORY }}
|
path: ${{ env.DBT_REPO_DIRECTORY }}
|
||||||
|
ref: ${{ inputs.target_branch }}
|
||||||
|
|
||||||
- name: Check for changes in core/dbt/artifacts
|
- name: Check for changes in core/dbt/artifacts
|
||||||
# https://github.com/marketplace/actions/paths-changes-filter
|
# https://github.com/marketplace/actions/paths-changes-filter
|
||||||
uses: dorny/paths-filter@v3
|
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # dorny/paths-filter@v3
|
||||||
id: check_artifact_changes
|
id: check_artifact_changes
|
||||||
with:
|
with:
|
||||||
filters: |
|
filters: |
|
||||||
@@ -62,21 +69,19 @@ jobs:
|
|||||||
|
|
||||||
- name: Checkout schemas.getdbt.com repo
|
- name: Checkout schemas.getdbt.com repo
|
||||||
if: steps.check_artifact_changes.outputs.artifacts_changed == 'true'
|
if: steps.check_artifact_changes.outputs.artifacts_changed == 'true'
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
repository: dbt-labs/schemas.getdbt.com
|
repository: dbt-labs/schemas.getdbt.com
|
||||||
ref: 'main'
|
ref: "main"
|
||||||
path: ${{ env.SCHEMA_REPO_DIRECTORY }}
|
path: ${{ env.SCHEMA_REPO_DIRECTORY }}
|
||||||
|
|
||||||
- name: Generate current schema
|
- name: Generate current schema
|
||||||
if: steps.check_artifact_changes.outputs.artifacts_changed == 'true'
|
if: steps.check_artifact_changes.outputs.artifacts_changed == 'true'
|
||||||
run: |
|
run: |
|
||||||
cd ${{ env.DBT_REPO_DIRECTORY }}
|
cd ${{ env.DBT_REPO_DIRECTORY }}/core
|
||||||
python3 -m venv env
|
pip install --upgrade pip hatch
|
||||||
source env/bin/activate
|
hatch run setup
|
||||||
pip install --upgrade pip
|
hatch run json-schema -- --path ${{ env.LATEST_SCHEMA_PATH }}
|
||||||
pip install -r dev-requirements.txt -r editable-requirements.txt
|
|
||||||
python scripts/collect-artifact-schema.py --path ${{ env.LATEST_SCHEMA_PATH }}
|
|
||||||
|
|
||||||
# Copy generated schema files into the schemas.getdbt.com repo
|
# Copy generated schema files into the schemas.getdbt.com repo
|
||||||
# Do a git diff to find any changes
|
# Do a git diff to find any changes
|
||||||
@@ -89,8 +94,8 @@ jobs:
|
|||||||
git diff -I='*[0-9]{4}-[0-9]{2}-[0-9]{2}' -I='*[0-9]+\.[0-9]+\.[0-9]+' --exit-code > ${{ env.SCHEMA_DIFF_ARTIFACT }}
|
git diff -I='*[0-9]{4}-[0-9]{2}-[0-9]{2}' -I='*[0-9]+\.[0-9]+\.[0-9]+' --exit-code > ${{ env.SCHEMA_DIFF_ARTIFACT }}
|
||||||
|
|
||||||
- name: Upload schema diff
|
- name: Upload schema diff
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # actions/upload-artifact@v4
|
||||||
if: ${{ failure() && steps.check_artifact_changes.outputs.artifacts_changed == 'true' }}
|
if: ${{ failure() && steps.check_artifact_changes.outputs.artifacts_changed == 'true' }}
|
||||||
with:
|
with:
|
||||||
name: 'schema_changes.txt'
|
name: "schema_changes.txt"
|
||||||
path: '${{ env.SCHEMA_DIFF_ARTIFACT }}'
|
path: "${{ env.SCHEMA_DIFF_ARTIFACT }}"
|
||||||
|
|||||||
@@ -14,6 +14,8 @@ on:
|
|||||||
- "*.latest"
|
- "*.latest"
|
||||||
- "releases/*"
|
- "releases/*"
|
||||||
pull_request:
|
pull_request:
|
||||||
|
merge_group:
|
||||||
|
types: [checks_requested]
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
permissions: read-all
|
permissions: read-all
|
||||||
@@ -45,7 +47,7 @@ jobs:
|
|||||||
# run the performance measurements on the current or default branch
|
# run the performance measurements on the current or default branch
|
||||||
test-schema:
|
test-schema:
|
||||||
name: Test Log Schema
|
name: Test Log Schema
|
||||||
runs-on: ubuntu-20.04
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 30
|
timeout-minutes: 30
|
||||||
needs:
|
needs:
|
||||||
- integration-metadata
|
- integration-metadata
|
||||||
@@ -67,26 +69,49 @@ jobs:
|
|||||||
DBT_TEST_USER_2: dbt_test_user_2
|
DBT_TEST_USER_2: dbt_test_user_2
|
||||||
DBT_TEST_USER_3: dbt_test_user_3
|
DBT_TEST_USER_3: dbt_test_user_3
|
||||||
|
|
||||||
|
services:
|
||||||
|
# Label used to access the service container
|
||||||
|
postgres:
|
||||||
|
# Docker Hub image
|
||||||
|
image: postgres
|
||||||
|
# Provide the password for postgres
|
||||||
|
env:
|
||||||
|
POSTGRES_PASSWORD: password
|
||||||
|
POSTGRES_USER: postgres
|
||||||
|
# Set health checks to wait until postgres has started
|
||||||
|
options: >-
|
||||||
|
--health-cmd pg_isready
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
ports:
|
||||||
|
- 5432:5432
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: checkout dev
|
- name: checkout dev
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
|
|
||||||
- name: Setup Python
|
- name: Setup Python
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: "3.9"
|
python-version: "3.10"
|
||||||
|
|
||||||
- name: Install python dependencies
|
- name: Install python dependencies
|
||||||
run: |
|
run: |
|
||||||
pip install --user --upgrade pip
|
pip install --user --upgrade pip
|
||||||
pip --version
|
pip --version
|
||||||
pip install tox
|
pip install hatch
|
||||||
tox --version
|
hatch --version
|
||||||
|
|
||||||
- name: Set up postgres
|
- name: Run postgres setup script
|
||||||
uses: ./.github/actions/setup-postgres-linux
|
run: |
|
||||||
|
./scripts/setup_db.sh
|
||||||
|
env:
|
||||||
|
PGHOST: localhost
|
||||||
|
PGPORT: 5432
|
||||||
|
PGPASSWORD: password
|
||||||
|
|
||||||
- name: ls
|
- name: ls
|
||||||
run: ls
|
run: ls
|
||||||
@@ -94,11 +119,11 @@ jobs:
|
|||||||
# integration tests generate a ton of logs in different files. the next step will find them all.
|
# integration tests generate a ton of logs in different files. the next step will find them all.
|
||||||
# we actually care if these pass, because the normal test run doesn't usually include many json log outputs
|
# we actually care if these pass, because the normal test run doesn't usually include many json log outputs
|
||||||
- name: Run integration tests
|
- name: Run integration tests
|
||||||
uses: nick-fields/retry@v3
|
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
|
||||||
with:
|
with:
|
||||||
timeout_minutes: 30
|
timeout_minutes: 30
|
||||||
max_attempts: 3
|
max_attempts: 3
|
||||||
command: tox -e integration -- -nauto
|
command: cd core && hatch run ci:integration-tests -- -nauto
|
||||||
env:
|
env:
|
||||||
PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
|
PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
|
||||||
|
|
||||||
|
|||||||
60
.github/workflows/test-repeater.yml
vendored
60
.github/workflows/test-repeater.yml
vendored
@@ -14,34 +14,33 @@ on:
|
|||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
branch:
|
branch:
|
||||||
description: 'Branch to check out'
|
description: "Branch to check out"
|
||||||
type: string
|
type: string
|
||||||
required: true
|
required: true
|
||||||
default: 'main'
|
default: "main"
|
||||||
test_path:
|
test_path:
|
||||||
description: 'Path to single test to run (ex: tests/functional/retry/test_retry.py::TestRetry::test_fail_fast)'
|
description: "Path to single test to run (ex: tests/functional/retry/test_retry.py::TestRetry::test_fail_fast)"
|
||||||
type: string
|
type: string
|
||||||
required: true
|
required: true
|
||||||
default: 'tests/functional/...'
|
default: "tests/functional/..."
|
||||||
python_version:
|
python_version:
|
||||||
description: 'Version of Python to Test Against'
|
description: "Version of Python to Test Against"
|
||||||
type: choice
|
type: choice
|
||||||
options:
|
options:
|
||||||
- '3.9'
|
- "3.10"
|
||||||
- '3.10'
|
- "3.11"
|
||||||
- '3.11'
|
|
||||||
os:
|
os:
|
||||||
description: 'OS to run test in'
|
description: "OS to run test in"
|
||||||
type: choice
|
type: choice
|
||||||
options:
|
options:
|
||||||
- 'ubuntu-latest'
|
- "ubuntu-latest"
|
||||||
- 'macos-14'
|
- "macos-14"
|
||||||
- 'windows-latest'
|
- "windows-latest"
|
||||||
num_runs_per_batch:
|
num_runs_per_batch:
|
||||||
description: 'Max number of times to run the test per batch. We always run 10 batches.'
|
description: "Max number of times to run the test per batch. We always run 10 batches."
|
||||||
type: number
|
type: number
|
||||||
required: true
|
required: true
|
||||||
default: '50'
|
default: "50"
|
||||||
|
|
||||||
permissions: read-all
|
permissions: read-all
|
||||||
|
|
||||||
@@ -51,7 +50,7 @@ defaults:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
debug:
|
debug:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ${{ vars.UBUNTU_LATEST }}
|
||||||
steps:
|
steps:
|
||||||
- name: "[DEBUG] Output Inputs"
|
- name: "[DEBUG] Output Inputs"
|
||||||
run: |
|
run: |
|
||||||
@@ -82,26 +81,37 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: "Checkout code"
|
- name: "Checkout code"
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.branch }}
|
ref: ${{ inputs.branch }}
|
||||||
|
|
||||||
- name: "Setup Python"
|
- name: "Setup Python"
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: "${{ inputs.python_version }}"
|
python-version: "${{ inputs.python_version }}"
|
||||||
|
|
||||||
|
- name: "Install hatch"
|
||||||
|
run: python -m pip install --user --upgrade pip hatch
|
||||||
|
|
||||||
- name: "Setup Dev Environment"
|
- name: "Setup Dev Environment"
|
||||||
run: make dev
|
run: |
|
||||||
|
cd core
|
||||||
|
hatch run setup
|
||||||
|
|
||||||
- name: "Set up postgres (linux)"
|
- name: "Set up postgres (linux)"
|
||||||
if: inputs.os == 'ubuntu-latest'
|
if: inputs.os == '${{ vars.UBUNTU_LATEST }}'
|
||||||
run: make setup-db
|
run: |
|
||||||
|
cd core
|
||||||
|
hatch run setup-db
|
||||||
|
|
||||||
# mac and windows don't use make due to limitations with docker with those runners in GitHub
|
# mac and windows don't use make due to limitations with docker with those runners in GitHub
|
||||||
- name: "Set up postgres (macos)"
|
- name: Set up postgres (macos)
|
||||||
if: inputs.os == 'macos-14'
|
if: runner.os == 'macOS'
|
||||||
uses: ./.github/actions/setup-postgres-macos
|
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
|
||||||
|
with:
|
||||||
|
timeout_minutes: 10
|
||||||
|
max_attempts: 3
|
||||||
|
command: ./scripts/setup_db.sh
|
||||||
|
|
||||||
- name: "Set up postgres (windows)"
|
- name: "Set up postgres (windows)"
|
||||||
if: inputs.os == 'windows-latest'
|
if: inputs.os == 'windows-latest'
|
||||||
@@ -150,5 +160,5 @@ jobs:
|
|||||||
- name: "Error for Failures"
|
- name: "Error for Failures"
|
||||||
if: ${{ steps.pytest.outputs.failure }}
|
if: ${{ steps.pytest.outputs.failure }}
|
||||||
run: |
|
run: |
|
||||||
echo "Batch ${{ matrix.batch }} failed ${{ steps.pytest.outputs.failure }} of ${{ inputs.num_runs_per_batch }} tests"
|
echo "Batch ${{ matrix.batch }} failed ${{ steps.pytest.outputs.failure }} of ${{ inputs.num_runs_per_batch }} tests"
|
||||||
exit 1
|
exit 1
|
||||||
|
|||||||
8
.gitignore
vendored
8
.gitignore
vendored
@@ -15,6 +15,7 @@ build/
|
|||||||
!core/dbt/docs/build
|
!core/dbt/docs/build
|
||||||
develop-eggs/
|
develop-eggs/
|
||||||
dist/
|
dist/
|
||||||
|
dist-*/
|
||||||
downloads/
|
downloads/
|
||||||
eggs/
|
eggs/
|
||||||
.eggs/
|
.eggs/
|
||||||
@@ -57,6 +58,9 @@ test.env
|
|||||||
makefile.test.env
|
makefile.test.env
|
||||||
*.pytest_cache/
|
*.pytest_cache/
|
||||||
|
|
||||||
|
# Unit test artifacts
|
||||||
|
index.html
|
||||||
|
|
||||||
|
|
||||||
# Translations
|
# Translations
|
||||||
*.mo
|
*.mo
|
||||||
@@ -92,6 +96,7 @@ target/
|
|||||||
# pycharm
|
# pycharm
|
||||||
.idea/
|
.idea/
|
||||||
venv/
|
venv/
|
||||||
|
.venv*/
|
||||||
|
|
||||||
# AWS credentials
|
# AWS credentials
|
||||||
.aws/
|
.aws/
|
||||||
@@ -105,3 +110,6 @@ venv/
|
|||||||
|
|
||||||
# poetry
|
# poetry
|
||||||
poetry.lock
|
poetry.lock
|
||||||
|
|
||||||
|
# asdf
|
||||||
|
.tool-versions
|
||||||
|
|||||||
4
.isort.cfg
Normal file
4
.isort.cfg
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
[settings]
|
||||||
|
profile=black
|
||||||
|
extend_skip_glob=.github/*,third-party-stubs/*,scripts/*
|
||||||
|
known_first_party=dbt,dbt_adapters,dbt_common,dbt_extractor,dbt_semantic_interfaces
|
||||||
@@ -1,62 +1,91 @@
|
|||||||
# Configuration for pre-commit hooks (see https://pre-commit.com/).
|
# Configuration for pre-commit hooks (see https://pre-commit.com/).
|
||||||
# Eventually the hooks described here will be run as tests before merging each PR.
|
# Eventually the hooks described here will be run as tests before merging each PR.
|
||||||
|
|
||||||
exclude: ^(core/dbt/docs/build/|core/dbt/common/events/types_pb2.py|core/dbt/events/core_types_pb2.py|core/dbt/adapters/events/adapter_types_pb2.py)
|
exclude: ^(core/dbt/docs/build/|core/dbt/common/events/types_pb2.py|core/dbt/adapters/events/adapter_types_pb2.py)
|
||||||
|
|
||||||
# Force all unspecified python hooks to run python 3.9
|
# Force all unspecified python hooks to run python 3.10
|
||||||
default_language_version:
|
default_language_version:
|
||||||
python: python3
|
python: python3
|
||||||
|
|
||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||||
rev: v3.2.0
|
rev: v3.2.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: check-yaml
|
- id: check-yaml
|
||||||
args: [--unsafe]
|
args: [--unsafe]
|
||||||
- id: check-json
|
- id: check-json
|
||||||
- id: end-of-file-fixer
|
- id: end-of-file-fixer
|
||||||
- id: trailing-whitespace
|
exclude: schemas/dbt/manifest/
|
||||||
exclude_types:
|
- id: trailing-whitespace
|
||||||
- "markdown"
|
exclude_types:
|
||||||
- id: check-case-conflict
|
- "markdown"
|
||||||
- repo: https://github.com/psf/black
|
- id: check-case-conflict
|
||||||
rev: 22.3.0
|
# local hooks are used to run the hooks in the local environment instead of a pre-commit isolated one.
|
||||||
hooks:
|
# This ensures that the hooks are run with the same version of the dependencies as the local environment
|
||||||
- id: black
|
# without having to manually keep them in sync.
|
||||||
- id: black
|
- repo: local
|
||||||
alias: black-check
|
hooks:
|
||||||
stages: [manual]
|
# Formatter/linter/type-checker pins live in the pyproject.dev optional dependency.
|
||||||
args:
|
- id: isort
|
||||||
- "--check"
|
name: isort
|
||||||
- "--diff"
|
entry: python -m isort
|
||||||
- repo: https://github.com/pycqa/flake8
|
language: system
|
||||||
rev: 4.0.1
|
types: [python]
|
||||||
hooks:
|
- id: black
|
||||||
- id: flake8
|
name: black
|
||||||
- id: flake8
|
entry: python -m black
|
||||||
alias: flake8-check
|
language: system
|
||||||
stages: [manual]
|
types: [python]
|
||||||
- repo: https://github.com/pre-commit/mirrors-mypy
|
- id: black-check
|
||||||
rev: v1.4.1
|
name: black-check
|
||||||
hooks:
|
entry: python -m black
|
||||||
- id: mypy
|
args:
|
||||||
# N.B.: Mypy is... a bit fragile.
|
- "--check"
|
||||||
#
|
- "--diff"
|
||||||
# By using `language: system` we run this hook in the local
|
language: system
|
||||||
# environment instead of a pre-commit isolated one. This is needed
|
stages: [manual]
|
||||||
# to ensure mypy correctly parses the project.
|
types: [python]
|
||||||
|
- id: flake8
|
||||||
# It may cause trouble
|
name: flake8
|
||||||
# in that it adds environmental variables out of our control to the
|
entry: python -m flake8
|
||||||
# mix. Unfortunately, there's nothing we can do about per pre-commit's
|
language: system
|
||||||
# author.
|
types: [python]
|
||||||
# See https://github.com/pre-commit/pre-commit/issues/730 for details.
|
- id: flake8-check
|
||||||
args: [--show-error-codes]
|
name: flake8-check
|
||||||
files: ^core/dbt/
|
entry: python -m flake8
|
||||||
language: system
|
language: system
|
||||||
- id: mypy
|
stages: [manual]
|
||||||
alias: mypy-check
|
types: [python]
|
||||||
stages: [manual]
|
# N.B.: Mypy is... a bit fragile.
|
||||||
args: [--show-error-codes, --pretty]
|
#
|
||||||
files: ^core/dbt/
|
# By using `language: system` we run this hook in the local
|
||||||
language: system
|
# environment instead of a pre-commit isolated one. This is needed
|
||||||
|
# to ensure mypy correctly parses the project.
|
||||||
|
#
|
||||||
|
# It may cause trouble
|
||||||
|
# in that it adds environmental variables out of our control to the
|
||||||
|
# mix. Unfortunately, there's nothing we can do about per pre-commit's
|
||||||
|
# author.
|
||||||
|
# See https://github.com/pre-commit/pre-commit/issues/730 for details.
|
||||||
|
- id: mypy
|
||||||
|
name: mypy
|
||||||
|
entry: python -m mypy
|
||||||
|
args: [--show-error-codes]
|
||||||
|
files: ^core/dbt/
|
||||||
|
language: system
|
||||||
|
types: [python]
|
||||||
|
- id: mypy-check
|
||||||
|
name: mypy-check
|
||||||
|
entry: python -m mypy
|
||||||
|
args: [--show-error-codes, --pretty]
|
||||||
|
files: ^core/dbt/
|
||||||
|
language: system
|
||||||
|
stages: [manual]
|
||||||
|
types: [python]
|
||||||
|
- id: no_versioned_artifact_resource_imports
|
||||||
|
name: no_versioned_artifact_resource_imports
|
||||||
|
entry: python scripts/pre-commit-hooks/no_versioned_artifact_resource_imports.py
|
||||||
|
language: system
|
||||||
|
files: ^core/dbt/
|
||||||
|
types: [python]
|
||||||
|
pass_filenames: true
|
||||||
|
|||||||
@@ -6,7 +6,6 @@ Most of the python code in the repository is within the `core/dbt` directory.
|
|||||||
- [`single python files`](core/dbt/README.md): A number of individual files, such as 'compilation.py' and 'exceptions.py'
|
- [`single python files`](core/dbt/README.md): A number of individual files, such as 'compilation.py' and 'exceptions.py'
|
||||||
|
|
||||||
The main subdirectories of core/dbt:
|
The main subdirectories of core/dbt:
|
||||||
- [`adapters`](core/dbt/adapters/README.md): Define base classes for behavior that is likely to differ across databases
|
|
||||||
- [`clients`](core/dbt/clients/README.md): Interface with dependencies (agate, jinja) or across operating systems
|
- [`clients`](core/dbt/clients/README.md): Interface with dependencies (agate, jinja) or across operating systems
|
||||||
- [`config`](core/dbt/config/README.md): Reconcile user-supplied configuration from connection profiles, project files, and Jinja macros
|
- [`config`](core/dbt/config/README.md): Reconcile user-supplied configuration from connection profiles, project files, and Jinja macros
|
||||||
- [`context`](core/dbt/context/README.md): Build and expose dbt-specific Jinja functionality
|
- [`context`](core/dbt/context/README.md): Build and expose dbt-specific Jinja functionality
|
||||||
@@ -14,14 +13,10 @@ The main subdirectories of core/dbt:
|
|||||||
- [`deps`](core/dbt/deps/README.md): Package installation and dependency resolution
|
- [`deps`](core/dbt/deps/README.md): Package installation and dependency resolution
|
||||||
- [`events`](core/dbt/events/README.md): Logging events
|
- [`events`](core/dbt/events/README.md): Logging events
|
||||||
- [`graph`](core/dbt/graph/README.md): Produce a `networkx` DAG of project resources, and selecting those resources given user-supplied criteria
|
- [`graph`](core/dbt/graph/README.md): Produce a `networkx` DAG of project resources, and selecting those resources given user-supplied criteria
|
||||||
- [`include`](core/dbt/include/README.md): The dbt "global project," which defines default implementations of Jinja2 macros
|
- [`include`](core/dbt/include/README.md): Set up the starter project scaffold.
|
||||||
- [`parser`](core/dbt/parser/README.md): Read project files, validate, construct python objects
|
- [`parser`](core/dbt/parser/README.md): Read project files, validate, construct python objects
|
||||||
- [`task`](core/dbt/task/README.md): Set forth the actions that dbt can perform when invoked
|
- [`task`](core/dbt/task/README.md): Set forth the actions that dbt can perform when invoked
|
||||||
|
|
||||||
Legacy tests are found in the 'test' directory:
|
|
||||||
- [`unit tests`](core/dbt/test/unit/README.md): Unit tests
|
|
||||||
- [`integration tests`](core/dbt/test/integration/README.md): Integration tests
|
|
||||||
|
|
||||||
### Invoking dbt
|
### Invoking dbt
|
||||||
|
|
||||||
The "tasks" map to top-level dbt commands. So `dbt run` => task.run.RunTask, etc. Some are more like abstract base classes (GraphRunnableTask, for example) but all the concrete types outside of task should map to tasks. Currently one executes at a time. The tasks kick off their “Runners” and those do execute in parallel. The parallelism is managed via a thread pool, in GraphRunnableTask.
|
The "tasks" map to top-level dbt commands. So `dbt run` => task.run.RunTask, etc. Some are more like abstract base classes (GraphRunnableTask, for example) but all the concrete types outside of task should map to tasks. Currently one executes at a time. The tasks kick off their “Runners” and those do execute in parallel. The parallelism is managed via a thread pool, in GraphRunnableTask.
|
||||||
@@ -32,7 +27,7 @@ This is the docs website code. It comes from the dbt-docs repository, and is gen
|
|||||||
## Adapters
|
## Adapters
|
||||||
|
|
||||||
dbt uses an adapter-plugin pattern to extend support to different databases, warehouses, query engines, etc.
|
dbt uses an adapter-plugin pattern to extend support to different databases, warehouses, query engines, etc.
|
||||||
Note: dbt-postgres used to exist in dbt-core but is now in [its own repo](https://github.com/dbt-labs/dbt-postgres)
|
Note: dbt-postgres used to exist in dbt-core but is now in [the dbt-adapters repo](https://github.com/dbt-labs/dbt-adapters/tree/main/dbt-postgres)
|
||||||
|
|
||||||
Each adapter is a mix of python, Jinja2, and SQL. The adapter code also makes heavy use of Jinja2 to wrap modular chunks of SQL functionality, define default implementations, and allow plugins to override it.
|
Each adapter is a mix of python, Jinja2, and SQL. The adapter code also makes heavy use of Jinja2 to wrap modular chunks of SQL functionality, define default implementations, and allow plugins to override it.
|
||||||
|
|
||||||
@@ -40,16 +35,15 @@ Each adapter plugin is a standalone python package that includes:
|
|||||||
|
|
||||||
- `dbt/include/[name]`: A "sub-global" dbt project, of YAML and SQL files, that reimplements Jinja macros to use the adapter's supported SQL syntax
|
- `dbt/include/[name]`: A "sub-global" dbt project, of YAML and SQL files, that reimplements Jinja macros to use the adapter's supported SQL syntax
|
||||||
- `dbt/adapters/[name]`: Python modules that inherit, and optionally reimplement, the base adapter classes defined in dbt-core
|
- `dbt/adapters/[name]`: Python modules that inherit, and optionally reimplement, the base adapter classes defined in dbt-core
|
||||||
- `setup.py`
|
- `pyproject.toml`
|
||||||
|
|
||||||
The Postgres adapter code is the most central, and many of its implementations are used as the default defined in the dbt-core global project. The greater the distance of a data technology from Postgres, the more its adapter plugin may need to reimplement.
|
The Postgres adapter code is the most central, and many of its implementations are used as the default defined in the dbt-core global project. The greater the distance of a data technology from Postgres, the more its adapter plugin may need to reimplement.
|
||||||
|
|
||||||
## Testing dbt
|
## Testing dbt
|
||||||
|
|
||||||
The [`test/`](test/) subdirectory includes unit and integration tests that run as continuous integration checks against open pull requests. Unit tests check mock inputs and outputs of specific python functions. Integration tests perform end-to-end dbt invocations against real adapters (Postgres, Redshift, Snowflake, BigQuery) and assert that the results match expectations. See [the contributing guide](CONTRIBUTING.md) for a step-by-step walkthrough of setting up a local development and testing environment.
|
The [`tests/`](tests/) subdirectory includes unit and fuctional tests that run as continuous integration checks against open pull requests. Unit tests check mock inputs and outputs of specific python functions. Functional tests perform end-to-end dbt invocations against real adapters (Postgres) and assert that the results match expectations. See [the contributing guide](CONTRIBUTING.md) for a step-by-step walkthrough of setting up a local development and testing environment.
|
||||||
|
|
||||||
## Everything else
|
## Everything else
|
||||||
|
|
||||||
- [docker](docker/): All dbt versions are published as Docker images on DockerHub. This subfolder contains the `Dockerfile` (constant) and `requirements.txt` (one for each version).
|
- [docker](docker/): All dbt versions are published as Docker images on DockerHub. This subfolder contains the `Dockerfile` (constant) and `requirements.txt` (one for each version).
|
||||||
- [etc](etc/): Images for README
|
|
||||||
- [scripts](scripts/): Helper scripts for testing, releasing, and producing JSON schemas. These are not included in distributions of dbt, nor are they rigorously tested—they're just handy tools for the dbt maintainers :)
|
- [scripts](scripts/): Helper scripts for testing, releasing, and producing JSON schemas. These are not included in distributions of dbt, nor are they rigorously tested—they're just handy tools for the dbt maintainers :)
|
||||||
|
|||||||
381
CHANGELOG.md
381
CHANGELOG.md
@@ -5,387 +5,14 @@
|
|||||||
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
|
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
|
||||||
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)
|
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)
|
||||||
|
|
||||||
## dbt-core 1.8.9 - November 21, 2024
|
|
||||||
|
|
||||||
### Under the Hood
|
|
||||||
|
|
||||||
- Pin dbt-common and dbt-adapters with upper bound of 2.0. ([#11024](https://github.com/dbt-labs/dbt-core/issues/11024))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## dbt-core 1.8.8 - October 23, 2024
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
|
|
||||||
- Fix unit tests for incremental model with alias ([#10754](https://github.com/dbt-labs/dbt-core/issues/10754))
|
|
||||||
|
|
||||||
### Under the Hood
|
|
||||||
|
|
||||||
- Remove support and testing for Python 3.8, which is now EOL. ([#10861](https://github.com/dbt-labs/dbt-core/issues/10861))
|
|
||||||
|
|
||||||
### Dependencies
|
|
||||||
|
|
||||||
- Pin dbt-common and dbt-adapters with upper bound. ([#10895](https://github.com/dbt-labs/dbt-core/issues/10895))
|
|
||||||
|
|
||||||
### Contributors
|
|
||||||
- [@katsugeneration](https://github.com/katsugeneration) ([#10754](https://github.com/dbt-labs/dbt-core/issues/10754))
|
|
||||||
|
|
||||||
## dbt-core 1.8.7 - September 24, 2024
|
|
||||||
|
|
||||||
### Features
|
|
||||||
|
|
||||||
- Add support for behavior flags ([#10618](https://github.com/dbt-labs/dbt-core/issues/10618))
|
|
||||||
|
|
||||||
## dbt-core 1.8.6 - August 29, 2024
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
|
|
||||||
- Late render pre- and post-hooks configs in properties / schema YAML files ([#10603](https://github.com/dbt-labs/dbt-core/issues/10603))
|
|
||||||
|
|
||||||
### Under the Hood
|
|
||||||
|
|
||||||
- Improve speed of tree traversal when finding children, increasing build speed for some selectors ([#10434](https://github.com/dbt-labs/dbt-core/issues/10434))
|
|
||||||
|
|
||||||
### Contributors
|
|
||||||
- [@ttusing](https://github.com/ttusing) ([#10434](https://github.com/dbt-labs/dbt-core/issues/10434))
|
|
||||||
|
|
||||||
## dbt-core 1.8.5 - August 07, 2024
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
|
|
||||||
- respect --quiet and --warn-error-options for flag deprecations ([#10105](https://github.com/dbt-labs/dbt-core/issues/10105))
|
|
||||||
|
|
||||||
## dbt-core 1.8.4 - July 18, 2024
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
|
|
||||||
- Fix setting `silence` of `warn_error_options` via `dbt_project.yaml` flags ([#10160](https://github.com/dbt-labs/dbt-core/issues/10160))
|
|
||||||
- Limit data_tests deprecation to root_project ([#9835](https://github.com/dbt-labs/dbt-core/issues/9835))
|
|
||||||
- CLI flags should take precedence over env var flags ([#10304](https://github.com/dbt-labs/dbt-core/issues/10304))
|
|
||||||
- Fix error constructing warn_error_options ([#10452](https://github.com/dbt-labs/dbt-core/issues/10452))
|
|
||||||
|
|
||||||
## dbt-core 1.8.3 - June 20, 2024
|
|
||||||
|
|
||||||
### Features
|
|
||||||
|
|
||||||
- add --empty value to jinja context as flags.EMPTY ([#10317](https://github.com/dbt-labs/dbt-core/issues/10317))
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
|
|
||||||
- Don't warn on `unit_test` config paths that are properly used ([#10311](https://github.com/dbt-labs/dbt-core/issues/10311))
|
|
||||||
|
|
||||||
### Docs
|
|
||||||
|
|
||||||
- Fix npm security vulnerabilities as of June 2024 ([dbt-docs/#513](https://github.com/dbt-labs/dbt-docs/issues/513))
|
|
||||||
|
|
||||||
## dbt-core 1.8.2 - June 05, 2024
|
|
||||||
|
|
||||||
### Features
|
|
||||||
|
|
||||||
- Add --host flag to dbt docs serve, defaulting to '127.0.0.1' ([#10229](https://github.com/dbt-labs/dbt-core/issues/10229))
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
|
|
||||||
- Fix: Order-insensitive unit test equality assertion for expected/actual with multiple nulls ([#10167](https://github.com/dbt-labs/dbt-core/issues/10167))
|
|
||||||
|
|
||||||
## dbt-core 1.8.1 - May 22, 2024
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
|
|
||||||
- Add resource type to saved_query ([#10168](https://github.com/dbt-labs/dbt-core/issues/10168))
|
|
||||||
|
|
||||||
### Docs
|
|
||||||
|
|
||||||
- Enable display of unit tests ([dbt-docs/#501](https://github.com/dbt-labs/dbt-docs/issues/501))
|
|
||||||
- Unit tests not rendering ([dbt-docs/#506](https://github.com/dbt-labs/dbt-docs/issues/506))
|
|
||||||
- Add support for Saved Query node ([dbt-docs/#486](https://github.com/dbt-labs/dbt-docs/issues/486))
|
|
||||||
|
|
||||||
### Security
|
|
||||||
|
|
||||||
- Explicitly bind to localhost in docs serve ([#10209](https://github.com/dbt-labs/dbt-core/issues/10209))
|
|
||||||
|
|
||||||
## dbt-core 1.8.0 - May 09, 2024
|
|
||||||
|
|
||||||
### Breaking Changes
|
|
||||||
|
|
||||||
- Remove adapter.get_compiler interface ([#9148](https://github.com/dbt-labs/dbt-core/issues/9148))
|
|
||||||
- Move AdapterLogger to adapters folder ([#9151](https://github.com/dbt-labs/dbt-core/issues/9151))
|
|
||||||
- Rm --dry-run flag from 'dbt deps --add-package', in favor of just 'dbt deps --lock' ([#9100](https://github.com/dbt-labs/dbt-core/issues/9100))
|
|
||||||
- move event manager setup back to core, remove ref to global EVENT_MANAGER and clean up event manager functions ([#9150](https://github.com/dbt-labs/dbt-core/issues/9150))
|
|
||||||
- Remove dbt-tests-adapter and dbt-postgres packages from dbt-core ([#9455](https://github.com/dbt-labs/dbt-core/issues/9455))
|
|
||||||
- Update the default behaviour of require_explicit_package_overrides_for_builtin_materializations to True. ([#10062](https://github.com/dbt-labs/dbt-core/issues/10062))
|
|
||||||
|
|
||||||
### Features
|
|
||||||
|
|
||||||
- Initial implementation of unit testing ([#8287](https://github.com/dbt-labs/dbt-core/issues/8287))
|
|
||||||
- Unit test manifest artifacts and selection ([#8295](https://github.com/dbt-labs/dbt-core/issues/8295))
|
|
||||||
- Support config with tags & meta for unit tests ([#8294](https://github.com/dbt-labs/dbt-core/issues/8294))
|
|
||||||
- Allow adapters to include package logs in dbt standard logging ([#7859](https://github.com/dbt-labs/dbt-core/issues/7859))
|
|
||||||
- Enable inline csv fixtures in unit tests ([#8626](https://github.com/dbt-labs/dbt-core/issues/8626))
|
|
||||||
- Add drop_schema_named macro ([#8025](https://github.com/dbt-labs/dbt-core/issues/8025))
|
|
||||||
- migrate utils to common and adapters folders ([#8924](https://github.com/dbt-labs/dbt-core/issues/8924))
|
|
||||||
- Move Agate helper client into common ([#8926](https://github.com/dbt-labs/dbt-core/issues/8926))
|
|
||||||
- remove usage of dbt.config.PartialProject from dbt/adapters ([#8928](https://github.com/dbt-labs/dbt-core/issues/8928))
|
|
||||||
- Add exports to SavedQuery spec ([#8892](https://github.com/dbt-labs/dbt-core/issues/8892))
|
|
||||||
- Support unit testing incremental models ([#8422](https://github.com/dbt-labs/dbt-core/issues/8422))
|
|
||||||
- Add support of csv file fixtures to unit testing ([#8290](https://github.com/dbt-labs/dbt-core/issues/8290))
|
|
||||||
- Remove legacy logger ([#8027](https://github.com/dbt-labs/dbt-core/issues/8027))
|
|
||||||
- Unit tests support --defer and state:modified ([#8517](https://github.com/dbt-labs/dbt-core/issues/8517))
|
|
||||||
- Support setting export configs hierarchically via saved query and project configs ([#8956](https://github.com/dbt-labs/dbt-core/issues/8956))
|
|
||||||
- Support source inputs in unit tests ([#8507](https://github.com/dbt-labs/dbt-core/issues/8507))
|
|
||||||
- Use daff to render diff displayed in stdout when unit test fails ([#8558](https://github.com/dbt-labs/dbt-core/issues/8558))
|
|
||||||
- Global config for --target and --profile CLI flags and DBT_TARGET and DBT_PROFILE environment variables. ([#7798](https://github.com/dbt-labs/dbt-core/issues/7798))
|
|
||||||
- Move unit testing to test command ([#8979](https://github.com/dbt-labs/dbt-core/issues/8979))
|
|
||||||
- Support --empty flag for schema-only dry runs ([#8971](https://github.com/dbt-labs/dbt-core/issues/8971))
|
|
||||||
- Support unit tests in non-root packages ([#8285](https://github.com/dbt-labs/dbt-core/issues/8285))
|
|
||||||
- Convert the `tests` config to `data_tests` in both dbt_project.yml and schema files. ([#8699](https://github.com/dbt-labs/dbt-core/issues/8699))
|
|
||||||
- Make fixture files full-fledged parts of the manifest and enable partial parsing ([#9067](https://github.com/dbt-labs/dbt-core/issues/9067))
|
|
||||||
- Adds support for parsing conversion metric related properties for the semantic layer. ([#9203](https://github.com/dbt-labs/dbt-core/issues/9203))
|
|
||||||
- Package selector syntax for the current package ([#6891](https://github.com/dbt-labs/dbt-core/issues/6891))
|
|
||||||
- In build command run unit tests before models ([#9128](https://github.com/dbt-labs/dbt-core/issues/9128))
|
|
||||||
- Move flags from UserConfig in profiles.yml to flags in dbt_project.yml ([#9183](https://github.com/dbt-labs/dbt-core/issues/9183))
|
|
||||||
- Added hook support for `dbt source freshness` ([#5609](https://github.com/dbt-labs/dbt-core/issues/5609))
|
|
||||||
- Align with order of unit test output when `actual` differs from `expected` ([#9370](https://github.com/dbt-labs/dbt-core/issues/9370))
|
|
||||||
- Added support for external nodes in unit test nodes ([#8944](https://github.com/dbt-labs/dbt-core/issues/8944))
|
|
||||||
- Enable unit testing versioned models ([#9344](https://github.com/dbt-labs/dbt-core/issues/9344))
|
|
||||||
- Enable list command for unit tests ([#8508](https://github.com/dbt-labs/dbt-core/issues/8508))
|
|
||||||
- Integration Test Optimizations ([#9498](https://github.com/dbt-labs/dbt-core/issues/9498))
|
|
||||||
- Accelerate integration tests with caching. ([#9498](https://github.com/dbt-labs/dbt-core/issues/9498))
|
|
||||||
- Cache environment variables ([#9489](https://github.com/dbt-labs/dbt-core/issues/9489))
|
|
||||||
- Support meta at the config level for Metric nodes ([#9441](https://github.com/dbt-labs/dbt-core/issues/9441))
|
|
||||||
- Add cache to SavedQuery config ([#9540](https://github.com/dbt-labs/dbt-core/issues/9540))
|
|
||||||
- Support scrubbing secret vars ([#7247](https://github.com/dbt-labs/dbt-core/issues/7247))
|
|
||||||
- Allow excluding resource types for build, list, and clone commands, and provide env vars ([#9237](https://github.com/dbt-labs/dbt-core/issues/9237))
|
|
||||||
- SourceDefinition.meta represents source-level and table-level meta properties, instead of only table-level ([#9766](https://github.com/dbt-labs/dbt-core/issues/9766))
|
|
||||||
- Allow metrics in semantic layer filters. ([#9804](https://github.com/dbt-labs/dbt-core/issues/9804))
|
|
||||||
- Add wildcard support to the group selector method ([#9811](https://github.com/dbt-labs/dbt-core/issues/9811))
|
|
||||||
- source freshness precomputes metadata-based freshness in batch, if possible ([#8705](https://github.com/dbt-labs/dbt-core/issues/8705))
|
|
||||||
- Better error message when trying to select a disabled model ([#9747](https://github.com/dbt-labs/dbt-core/issues/9747))
|
|
||||||
- Support SQL in unit testing fixtures ([#9405](https://github.com/dbt-labs/dbt-core/issues/9405))
|
|
||||||
- Add require_explicit_package_overrides_for_builtin_materializations to dbt_project.yml flags, which can be used to opt-out of overriding built-in materializations from packages ([#10007](https://github.com/dbt-labs/dbt-core/issues/10007))
|
|
||||||
- add --empty flag to dbt build command ([#10026](https://github.com/dbt-labs/dbt-core/issues/10026))
|
|
||||||
- Ability to `silence` warnings via `warn_error_options` ([#9644](https://github.com/dbt-labs/dbt-core/issues/9644))
|
|
||||||
- Allow aliases `error` for `include` and `warn` for `exclude` in `warn_error_options` ([#9644](https://github.com/dbt-labs/dbt-core/issues/9644))
|
|
||||||
- Add unit_test: selection method ([#10053](https://github.com/dbt-labs/dbt-core/issues/10053))
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
|
|
||||||
- For packages installed with tarball method, fetch metadata to resolve nested dependencies ([#8621](https://github.com/dbt-labs/dbt-core/issues/8621))
|
|
||||||
- Fix partial parsing not working for semantic model change ([#8859](https://github.com/dbt-labs/dbt-core/issues/8859))
|
|
||||||
- Handle unknown `type_code` for model contracts ([#8877](https://github.com/dbt-labs/dbt-core/issues/8877), [#8353](https://github.com/dbt-labs/dbt-core/issues/8353))
|
|
||||||
- Rework get_catalog implementation to retain previous adapter interface semantics ([#8846](https://github.com/dbt-labs/dbt-core/issues/8846))
|
|
||||||
- Add back contract enforcement for temporary tables on postgres ([#8857](https://github.com/dbt-labs/dbt-core/issues/8857))
|
|
||||||
- Add version to fqn when version==0 ([#8836](https://github.com/dbt-labs/dbt-core/issues/8836))
|
|
||||||
- Fix cased comparison in catalog-retrieval function. ([#8939](https://github.com/dbt-labs/dbt-core/issues/8939))
|
|
||||||
- Catalog queries now assign the correct type to materialized views ([#8864](https://github.com/dbt-labs/dbt-core/issues/8864))
|
|
||||||
- Fix compilation exception running empty seed file and support new Integer agate data_type ([#8895](https://github.com/dbt-labs/dbt-core/issues/8895))
|
|
||||||
- Make relation filtering None-tolerant for maximal flexibility across adapters. ([#8974](https://github.com/dbt-labs/dbt-core/issues/8974))
|
|
||||||
- Update run_results.json from previous versions of dbt to support deferral and rerun from failure ([#9010](https://github.com/dbt-labs/dbt-core/issues/9010))
|
|
||||||
- Use MANIFEST.in to recursively include all jinja templates; fixes issue where some templates were not included in the distribution ([#9016](https://github.com/dbt-labs/dbt-core/issues/9016))
|
|
||||||
- Fix git repository with subdirectory for Deps ([#9000](https://github.com/dbt-labs/dbt-core/issues/9000))
|
|
||||||
- Use seed file from disk for unit testing if rows not specified in YAML config ([#8652](https://github.com/dbt-labs/dbt-core/issues/8652))
|
|
||||||
- Fix formatting of tarball information in packages-lock.yml ([#9062](https://github.com/dbt-labs/dbt-core/issues/9062))
|
|
||||||
- deps: Lock git packages to commit SHA during resolution ([#9050](https://github.com/dbt-labs/dbt-core/issues/9050))
|
|
||||||
- deps: Use PackageRenderer to read package-lock.json ([#9127](https://github.com/dbt-labs/dbt-core/issues/9127))
|
|
||||||
- Ensure we produce valid jsonschema schemas for manifest, catalog, run-results, and sources ([#8991](https://github.com/dbt-labs/dbt-core/issues/8991))
|
|
||||||
- Get sources working again in dbt docs generate ([#9119](https://github.com/dbt-labs/dbt-core/issues/9119))
|
|
||||||
- Fix parsing f-strings in python models ([#6976](https://github.com/dbt-labs/dbt-core/issues/6976))
|
|
||||||
- Preserve the value of vars and the --full-refresh flags when using retry. ([#9112](https://github.com/dbt-labs/dbt-core/issues/9112))
|
|
||||||
- fix lock-file bad indentation ([#9319](https://github.com/dbt-labs/dbt-core/issues/9319))
|
|
||||||
- fix configuration of turning test warnings into failures with WARN_ERROR_OPTIONS ([#7761](https://github.com/dbt-labs/dbt-core/issues/7761))
|
|
||||||
- Support reasonably long unit test names ([#9015](https://github.com/dbt-labs/dbt-core/issues/9015))
|
|
||||||
- Fix back-compat parsing for model-level 'tests', source table-level 'tests', and 'tests' defined on model versions ([#9411](https://github.com/dbt-labs/dbt-core/issues/9411))
|
|
||||||
- Fix retry command run from CLI ([#9444](https://github.com/dbt-labs/dbt-core/issues/9444))
|
|
||||||
- Fix seed and source selection in `dbt docs generate` ([#9161](https://github.com/dbt-labs/dbt-core/issues/9161))
|
|
||||||
- Add TestGenerateCatalogWithExternalNodes, include empty nodes in node selection during docs generate ([#9456](https://github.com/dbt-labs/dbt-core/issues/9456))
|
|
||||||
- Fix node type plurals in FoundStats log message ([#9464](https://github.com/dbt-labs/dbt-core/issues/9464))
|
|
||||||
- Run manifest upgrade preprocessing on any older manifest version, including v11 ([#9487](https://github.com/dbt-labs/dbt-core/issues/9487))
|
|
||||||
- Update 'compiled_code' context member logic to route based on command ('clone' or not). Reimplement 'sql' context member as wrapper of 'compiled_code'. ([#9502](https://github.com/dbt-labs/dbt-core/issues/9502))
|
|
||||||
- Fix bug where Semantic Layer filter strings are parsed into lists. ([#9507](https://github.com/dbt-labs/dbt-core/issues/9507))
|
|
||||||
- Initialize invocation context before test fixtures are built. ([#9489](https://github.com/dbt-labs/dbt-core/issues/9489))
|
|
||||||
- Fix conflict with newer versions of Snowplow tracker ([#8719](https://github.com/dbt-labs/dbt-core/issues/8719))
|
|
||||||
- When patching versioned models, set constraints after config ([#9364](https://github.com/dbt-labs/dbt-core/issues/9364))
|
|
||||||
- only include unmodified semantic models in state:modified selection ([#9548](https://github.com/dbt-labs/dbt-core/issues/9548))
|
|
||||||
- Set query headers when manifest is passed in to dbtRunner ([#9546](https://github.com/dbt-labs/dbt-core/issues/9546))
|
|
||||||
- Store node_info in node associated logging events ([#9557](https://github.com/dbt-labs/dbt-core/issues/9557))
|
|
||||||
- Fix Semantic Model Compare node relations ([#9548](https://github.com/dbt-labs/dbt-core/issues/9548))
|
|
||||||
- Tighten exception handling to avoid worker thread hangs. ([#9583](https://github.com/dbt-labs/dbt-core/issues/9583))
|
|
||||||
- Clearer no-op logging in stubbed SavedQueryRunner ([#9533](https://github.com/dbt-labs/dbt-core/issues/9533))
|
|
||||||
- Fix node_info contextvar handling so incorrect node_info doesn't persist ([#8866](https://github.com/dbt-labs/dbt-core/issues/8866))
|
|
||||||
- Add target-path to retry ([#8948](https://github.com/dbt-labs/dbt-core/issues/8948))
|
|
||||||
- Do not add duplicate input_measures ([#9360](https://github.com/dbt-labs/dbt-core/issues/9360))
|
|
||||||
- Throw a ParsingError if a primary key constraint is defined on multiple columns or at both the column and model level. ([#9581](https://github.com/dbt-labs/dbt-core/issues/9581))
|
|
||||||
- Bug fix: don't parse Jinja in filters for input metrics or measures. ([#9582](https://github.com/dbt-labs/dbt-core/issues/9582))
|
|
||||||
- Fix traceback parsing for exceptions raised due to csv fixtures moved into or out of fixture/subfolders. ([#9570](https://github.com/dbt-labs/dbt-core/issues/9570))
|
|
||||||
- Fix partial parsing `KeyError` on deleted schema files ([#8860](https://github.com/dbt-labs/dbt-core/issues/8860))
|
|
||||||
- Support saved queries in `dbt list` ([#9532](https://github.com/dbt-labs/dbt-core/issues/9532))
|
|
||||||
- include sources in catalog.json when over 100 relations selected for catalog generation ([#9755](https://github.com/dbt-labs/dbt-core/issues/9755))
|
|
||||||
- Support overriding macros in packages in unit testing ([#9624](https://github.com/dbt-labs/dbt-core/issues/9624))
|
|
||||||
- Handle exceptions for failing on-run-* hooks in source freshness ([#9511](https://github.com/dbt-labs/dbt-core/issues/9511))
|
|
||||||
- Validation of unit test parsing for incremental models ([#9593](https://github.com/dbt-labs/dbt-core/issues/9593))
|
|
||||||
- Fix use of retry command on command using defer ([#9770](https://github.com/dbt-labs/dbt-core/issues/9770))
|
|
||||||
- Make `args` variable to be un-modified by `dbt.invoke(args)` ([#8938](https://github.com/dbt-labs/dbt-core/issues/8938), [#9787](https://github.com/dbt-labs/dbt-core/issues/9787))
|
|
||||||
- Only create the packages-install-path / dbt_packages folder during dbt deps ([#6985](https://github.com/dbt-labs/dbt-core/issues/6985), [#9584](https://github.com/dbt-labs/dbt-core/issues/9584))
|
|
||||||
- Unit test path outputs ([#9608](https://github.com/dbt-labs/dbt-core/issues/9608))
|
|
||||||
- Fix assorted source freshness edgecases so check is run or actionable information is given ([#9078](https://github.com/dbt-labs/dbt-core/issues/9078))
|
|
||||||
- Fix Docker release process to account for both historical and current versions of `dbt-postgres` ([#9827](https://github.com/dbt-labs/dbt-core/issues/9827))
|
|
||||||
- Exclude password-like fields for considering reparse ([#9795](https://github.com/dbt-labs/dbt-core/issues/9795))
|
|
||||||
- Fixed query comments test ([#9860](https://github.com/dbt-labs/dbt-core/issues/9860))
|
|
||||||
- Begin warning people about spaces in model names ([#9397](https://github.com/dbt-labs/dbt-core/issues/9397))
|
|
||||||
- Add NodeRelation to SavedQuery Export ([#9534](https://github.com/dbt-labs/dbt-core/issues/9534))
|
|
||||||
- Disambiguate FreshnessConfigProblem error message ([#9891](https://github.com/dbt-labs/dbt-core/issues/9891))
|
|
||||||
- Use consistent secret scrubbing with the log function. ([#9987](https://github.com/dbt-labs/dbt-core/issues/9987))
|
|
||||||
- Validate against empty strings in package definitions ([#9985](https://github.com/dbt-labs/dbt-core/issues/9985))
|
|
||||||
- Fix default value for indirect selection in selector cannot overwritten by CLI flag and env var ([#9976](https://github.com/dbt-labs/dbt-core/issues/9976), [#7673](https://github.com/dbt-labs/dbt-core/issues/7673))
|
|
||||||
- Simplify error message if test severity isn't 'warn' or 'error' ([#9715](https://github.com/dbt-labs/dbt-core/issues/9715))
|
|
||||||
- Support overriding source level loaded_at_field with a null table level definition ([#9320](https://github.com/dbt-labs/dbt-core/issues/9320))
|
|
||||||
- Undo conditional agate import to prevent UnresolvedTypeReferenceError during RunResult serialization ([#10098](https://github.com/dbt-labs/dbt-core/issues/10098))
|
|
||||||
- Restore previous behavior for --favor-state: only favor defer_relation if not selected in current command ([#10107](https://github.com/dbt-labs/dbt-core/issues/10107))
|
|
||||||
- Unit test fixture (csv) returns null for empty value ([#9881](https://github.com/dbt-labs/dbt-core/issues/9881))
|
|
||||||
|
|
||||||
### Docs
|
|
||||||
|
|
||||||
- Add analytics for dbt.com ([dbt-docs/#430](https://github.com/dbt-labs/dbt-docs/issues/430))
|
|
||||||
- fix get_custom_database docstring ([dbt-docs/#9003](https://github.com/dbt-labs/dbt-docs/issues/9003))
|
|
||||||
- Enable display of unit tests ([dbt-docs/#501](https://github.com/dbt-labs/dbt-docs/issues/501))
|
|
||||||
- Unit tests not rendering ([dbt-docs/#506](https://github.com/dbt-labs/dbt-docs/issues/506))
|
|
||||||
|
|
||||||
### Under the Hood
|
|
||||||
|
|
||||||
- Added more type annotations. ([#8537](https://github.com/dbt-labs/dbt-core/issues/8537))
|
|
||||||
- Add unit testing functional tests ([#8512](https://github.com/dbt-labs/dbt-core/issues/8512))
|
|
||||||
- Remove usage of dbt.include.global_project in dbt/adapters ([#8925](https://github.com/dbt-labs/dbt-core/issues/8925))
|
|
||||||
- Add a no-op runner for Saved Query ([#8893](https://github.com/dbt-labs/dbt-core/issues/8893))
|
|
||||||
- remove dbt.flags.MP_CONTEXT usage in dbt/adapters ([#8967](https://github.com/dbt-labs/dbt-core/issues/8967))
|
|
||||||
- Remove usage of dbt.flags.LOG_CACHE_EVENTS in dbt/adapters ([#8969](https://github.com/dbt-labs/dbt-core/issues/8969))
|
|
||||||
- Move CatalogRelationTypes test case to the shared test suite to be reused by adapter maintainers ([#8952](https://github.com/dbt-labs/dbt-core/issues/8952))
|
|
||||||
- Treat SystemExit as an interrupt if raised during node execution. ([#n/a](https://github.com/dbt-labs/dbt-core/issues/n/a))
|
|
||||||
- Removing unused 'documentable' ([#8871](https://github.com/dbt-labs/dbt-core/issues/8871))
|
|
||||||
- Remove use of dbt/core exceptions in dbt/adapter ([#8920](https://github.com/dbt-labs/dbt-core/issues/8920))
|
|
||||||
- Cache dbt plugin modules to improve integration test performance ([#9029](https://github.com/dbt-labs/dbt-core/issues/9029))
|
|
||||||
- Consolidate deferral methods & flags ([#7965](https://github.com/dbt-labs/dbt-core/issues/7965), [#8715](https://github.com/dbt-labs/dbt-core/issues/8715))
|
|
||||||
- Fix test_current_timestamp_matches_utc test; allow for MacOS runner system clock variance ([#9057](https://github.com/dbt-labs/dbt-core/issues/9057))
|
|
||||||
- Remove usage of dbt.deprecations in dbt/adapters, enable core & adapter-specific event types and protos ([#8927](https://github.com/dbt-labs/dbt-core/issues/8927), [#8918](https://github.com/dbt-labs/dbt-core/issues/8918))
|
|
||||||
- Clean up unused adapter folders ([#9123](https://github.com/dbt-labs/dbt-core/issues/9123))
|
|
||||||
- Move column constraints into common/contracts, removing another dependency of adapters on core. ([#9024](https://github.com/dbt-labs/dbt-core/issues/9024))
|
|
||||||
- Move dbt.semver to dbt.common.semver and update references. ([#9039](https://github.com/dbt-labs/dbt-core/issues/9039))
|
|
||||||
- Move lowercase utils method to common ([#9180](https://github.com/dbt-labs/dbt-core/issues/9180))
|
|
||||||
- Remove usages of dbt.clients.jinja in dbt/adapters ([#9205](https://github.com/dbt-labs/dbt-core/issues/9205))
|
|
||||||
- Remove usage of dbt.contracts in dbt/adapters ([#9208](https://github.com/dbt-labs/dbt-core/issues/9208))
|
|
||||||
- Remove usage of dbt.contracts.graph.nodes.ResultNode in dbt/adapters ([#9214](https://github.com/dbt-labs/dbt-core/issues/9214))
|
|
||||||
- Introduce RelationConfig Protocol, consolidate Relation.create_from ([#9215](https://github.com/dbt-labs/dbt-core/issues/9215))
|
|
||||||
- remove manifest from adapter.set_relations_cache signature ([#9217](https://github.com/dbt-labs/dbt-core/issues/9217))
|
|
||||||
- remove manifest from adapter catalog method signatures ([#9218](https://github.com/dbt-labs/dbt-core/issues/9218))
|
|
||||||
- Move BaseConfig, Metadata and various other contract classes from model_config to common/contracts/config ([#8919](https://github.com/dbt-labs/dbt-core/issues/8919))
|
|
||||||
- Add MacroResolverProtocol, remove lazy loading of manifest in adapter.execute_macro ([#9244](https://github.com/dbt-labs/dbt-core/issues/9244))
|
|
||||||
- pass query header context to MacroQueryStringSetter ([#9249](https://github.com/dbt-labs/dbt-core/issues/9249), [#9250](https://github.com/dbt-labs/dbt-core/issues/9250))
|
|
||||||
- add macro_context_generator on adapter ([#9247](https://github.com/dbt-labs/dbt-core/issues/9247))
|
|
||||||
- pass mp_context to adapter factory as argument instead of import ([#9025](https://github.com/dbt-labs/dbt-core/issues/9025))
|
|
||||||
- have dbt-postgres use RelationConfig protocol for materialized views ([#9292](https://github.com/dbt-labs/dbt-core/issues/9292))
|
|
||||||
- move system.py to common as dbt-bigquery relies on it to call gcloud ([#9293](https://github.com/dbt-labs/dbt-core/issues/9293))
|
|
||||||
- Reorganizing event definitions to define core events in dbt/events rather than dbt/common ([#9152](https://github.com/dbt-labs/dbt-core/issues/9152))
|
|
||||||
- move exceptions used only in dbt/common to dbt/common/exceptions ([#9332](https://github.com/dbt-labs/dbt-core/issues/9332))
|
|
||||||
- Remove usage of dbt.adapters.factory in dbt/common ([#9334](https://github.com/dbt-labs/dbt-core/issues/9334))
|
|
||||||
- Accept valid_error_names in WarnErrorOptions constructor, remove global usage of event modules ([#9337](https://github.com/dbt-labs/dbt-core/issues/9337))
|
|
||||||
- Move result objects to dbt.artifacts ([#9193](https://github.com/dbt-labs/dbt-core/issues/9193))
|
|
||||||
- dbt Labs OSS standardization of docs and templates. ([#9252](https://github.com/dbt-labs/dbt-core/issues/9252))
|
|
||||||
- Add dbt-common as a dependency and remove dbt/common ([#9357](https://github.com/dbt-labs/dbt-core/issues/9357))
|
|
||||||
- move cache exceptions to dbt/adapters ([#9362](https://github.com/dbt-labs/dbt-core/issues/9362))
|
|
||||||
- Clean up macro contexts. ([#9422](https://github.com/dbt-labs/dbt-core/issues/9422))
|
|
||||||
- Add the @requires.manifest decorator to the retry command. ([#9426](https://github.com/dbt-labs/dbt-core/issues/9426))
|
|
||||||
- Move WritableManifest + Documentation to dbt/artifacts ([#9378](https://github.com/dbt-labs/dbt-core/issues/9378), [#9379](https://github.com/dbt-labs/dbt-core/issues/9379))
|
|
||||||
- Define Macro and Group resources in dbt/artifacts ([#9381](https://github.com/dbt-labs/dbt-core/issues/9381), [#9382](https://github.com/dbt-labs/dbt-core/issues/9382))
|
|
||||||
- Move `SavedQuery` data definition to `dbt/artifacts` ([#9386](https://github.com/dbt-labs/dbt-core/issues/9386))
|
|
||||||
- Migrate data parts of `Metric` node to dbt/artifacts ([#9383](https://github.com/dbt-labs/dbt-core/issues/9383))
|
|
||||||
- Move data portion of `SemanticModel` to dbt/artifacts ([#9387](https://github.com/dbt-labs/dbt-core/issues/9387))
|
|
||||||
- Move data parts of `Exposure` class to dbt/artifacts ([#9380](https://github.com/dbt-labs/dbt-core/issues/9380))
|
|
||||||
- Split up deferral across parsing (adding 'defer_relation' from state manifest) and runtime ref resolution ([#9199](https://github.com/dbt-labs/dbt-core/issues/9199))
|
|
||||||
- Start using `Mergeable` from dbt-common ([#9505](https://github.com/dbt-labs/dbt-core/issues/9505))
|
|
||||||
- Move manifest nodes to artifacts ([#9388](https://github.com/dbt-labs/dbt-core/issues/9388))
|
|
||||||
- Move data parts of `SourceDefinition` class to dbt/artifacts ([#9384](https://github.com/dbt-labs/dbt-core/issues/9384))
|
|
||||||
- Remove uses of Replaceable class ([#7802](https://github.com/dbt-labs/dbt-core/issues/7802))
|
|
||||||
- Make dbt-core compatible with Python 3.12 ([#9007](https://github.com/dbt-labs/dbt-core/issues/9007))
|
|
||||||
- Restrict protobuf to major version 4. ([#9566](https://github.com/dbt-labs/dbt-core/issues/9566))
|
|
||||||
- Remove references to dbt.tracking and dbt.flags from dbt/artifacts ([#9390](https://github.com/dbt-labs/dbt-core/issues/9390))
|
|
||||||
- Remove unused key `wildcard` from MethodName enum ([#9641](https://github.com/dbt-labs/dbt-core/issues/9641))
|
|
||||||
- Implement primary key inference for model nodes ([#9652](https://github.com/dbt-labs/dbt-core/issues/9652))
|
|
||||||
- Define UnitTestDefinition resource in dbt/artifacts/resources ([#9667](https://github.com/dbt-labs/dbt-core/issues/9667))
|
|
||||||
- Use Manifest instead of WritableManifest in PreviousState and _get_deferred_manifest ([#9567](https://github.com/dbt-labs/dbt-core/issues/9567))
|
|
||||||
- Improve dbt CLI speed ([#4627](https://github.com/dbt-labs/dbt-core/issues/4627))
|
|
||||||
- Include node_info in various Result events ([#9619](https://github.com/dbt-labs/dbt-core/issues/9619))
|
|
||||||
- Remove non dbt.artifacts dbt.* imports from dbt/artifacts ([#9926](https://github.com/dbt-labs/dbt-core/issues/9926))
|
|
||||||
- Migrate to using `error_tag` provided by `dbt-common` ([#9914](https://github.com/dbt-labs/dbt-core/issues/9914))
|
|
||||||
- Add a test for semantic manifest and move test fixtures needed for it ([#9665](https://github.com/dbt-labs/dbt-core/issues/9665))
|
|
||||||
- Raise deprecation warning if installed package overrides built-in materialization ([#9971](https://github.com/dbt-labs/dbt-core/issues/9971))
|
|
||||||
- Use the SECRET_ENV_PREFIX from dbt_common instead of duplicating it in dbt-core ([#10018](https://github.com/dbt-labs/dbt-core/issues/10018))
|
|
||||||
- Enable use of record mode via environment variable ([#10045](https://github.com/dbt-labs/dbt-core/issues/10045))
|
|
||||||
- Consistent naming + deprecation warnings for "legacy behavior" flags ([#10062](https://github.com/dbt-labs/dbt-core/issues/10062))
|
|
||||||
- Enable use of context in serialization ([#10093](https://github.com/dbt-labs/dbt-core/issues/10093))
|
|
||||||
|
|
||||||
### Dependencies
|
|
||||||
|
|
||||||
- Bump actions/checkout from 3 to 4 ([#8781](https://github.com/dbt-labs/dbt-core/issues/8781))
|
|
||||||
- Begin using DSI 0.4.x ([#8892](https://github.com/dbt-labs/dbt-core/issues/8892))
|
|
||||||
- Update typing-extensions version to >=4.4 ([#9012](https://github.com/dbt-labs/dbt-core/issues/9012))
|
|
||||||
- Bump ddtrace from 2.1.7 to 2.3.0 ([#9132](https://github.com/dbt-labs/dbt-core/issues/9132))
|
|
||||||
- Bump freezegun from 0.3.12 to 1.3.0 ([#9197](https://github.com/dbt-labs/dbt-core/issues/9197))
|
|
||||||
- Bump actions/setup-python from 4 to 5 ([#9267](https://github.com/dbt-labs/dbt-core/issues/9267))
|
|
||||||
- Bump actions/download-artifact from 3 to 4 ([#9374](https://github.com/dbt-labs/dbt-core/issues/9374))
|
|
||||||
- Relax pathspec upper bound version restriction ([#9373](https://github.com/dbt-labs/dbt-core/issues/9373))
|
|
||||||
- remove dbt/adapters and add dependency on dbt-adapters ([#9430](https://github.com/dbt-labs/dbt-core/issues/9430))
|
|
||||||
- Bump actions/upload-artifact from 3 to 4 ([#9470](https://github.com/dbt-labs/dbt-core/issues/9470))
|
|
||||||
- Bump actions/cache from 3 to 4 ([#9471](https://github.com/dbt-labs/dbt-core/issues/9471))
|
|
||||||
- Bump peter-evans/create-pull-request from 5 to 6 ([#9552](https://github.com/dbt-labs/dbt-core/issues/9552))
|
|
||||||
- Restrict protobuf to 4.* versions ([#9566](https://github.com/dbt-labs/dbt-core/issues/9566))
|
|
||||||
- Bump codecov/codecov-action from 3 to 4 ([#9659](https://github.com/dbt-labs/dbt-core/issues/9659))
|
|
||||||
- Cap dbt-semantic-interfaces version range to <0.6 ([#9671](https://github.com/dbt-labs/dbt-core/issues/9671))
|
|
||||||
- Bump python from 3.10.7-slim-bullseye to 3.11.2-slim-bullseye in /docker ([#9687](https://github.com/dbt-labs/dbt-core/issues/9687))
|
|
||||||
- bump dbt-common to accept major version 1 ([#9690](https://github.com/dbt-labs/dbt-core/issues/9690))
|
|
||||||
- Remove duplicate dependency of protobuf in dev-requirements ([#9830](https://github.com/dbt-labs/dbt-core/issues/9830))
|
|
||||||
- Bump black from 23.3.0 to >=24.3.0,<25.0 ([#8074](https://github.com/dbt-labs/dbt-core/issues/8074))
|
|
||||||
- Update the agate pin to "agate>=1.7.0,<1.10" ([#9934](https://github.com/dbt-labs/dbt-core/issues/9934))
|
|
||||||
|
|
||||||
### Security
|
|
||||||
|
|
||||||
- Update Jinja2 to >= 3.1.3 to address CVE-2024-22195 ([#9638](https://github.com/dbt-labs/dbt-core/issues/9638))
|
|
||||||
- Bump sqlparse to >=0.5.0, <0.6.0 to address GHSA-2m57-hf25-phgg ([#9951](https://github.com/dbt-labs/dbt-core/issues/9951))
|
|
||||||
|
|
||||||
### Contributors
|
|
||||||
- [@LeoTheGriff](https://github.com/LeoTheGriff) ([#9003](https://github.com/dbt-labs/dbt-core/issues/9003))
|
|
||||||
- [@SamuelBFavarin](https://github.com/SamuelBFavarin) ([#9747](https://github.com/dbt-labs/dbt-core/issues/9747))
|
|
||||||
- [@WilliamDee](https://github.com/WilliamDee) ([#9203](https://github.com/dbt-labs/dbt-core/issues/9203))
|
|
||||||
- [@adamlopez](https://github.com/adamlopez) ([#8621](https://github.com/dbt-labs/dbt-core/issues/8621))
|
|
||||||
- [@akurdyukov](https://github.com/akurdyukov) ([#8719](https://github.com/dbt-labs/dbt-core/issues/8719))
|
|
||||||
- [@aliceliu](https://github.com/aliceliu) ([#9652](https://github.com/dbt-labs/dbt-core/issues/9652))
|
|
||||||
- [@asweet](https://github.com/asweet) ([#9641](https://github.com/dbt-labs/dbt-core/issues/9641))
|
|
||||||
- [@b-per](https://github.com/b-per) ([#430](https://github.com/dbt-labs/dbt-core/issues/430))
|
|
||||||
- [@barton996](https://github.com/barton996) ([#7798](https://github.com/dbt-labs/dbt-core/issues/7798), [#6891](https://github.com/dbt-labs/dbt-core/issues/6891))
|
|
||||||
- [@benmosher](https://github.com/benmosher) (#n/a)
|
|
||||||
- [@colin-rorgers-dbt](https://github.com/colin-rorgers-dbt) ([#8919](https://github.com/dbt-labs/dbt-core/issues/8919))
|
|
||||||
- [@courtneyholcomb](https://github.com/courtneyholcomb) ([#9804](https://github.com/dbt-labs/dbt-core/issues/9804), [#9507](https://github.com/dbt-labs/dbt-core/issues/9507), [#9582](https://github.com/dbt-labs/dbt-core/issues/9582))
|
|
||||||
- [@damian3031](https://github.com/damian3031) ([#9860](https://github.com/dbt-labs/dbt-core/issues/9860))
|
|
||||||
- [@dwreeves](https://github.com/dwreeves) ([#4627](https://github.com/dbt-labs/dbt-core/issues/4627))
|
|
||||||
- [@edgarrmondragon](https://github.com/edgarrmondragon) ([#8719](https://github.com/dbt-labs/dbt-core/issues/8719))
|
|
||||||
- [@emmoop](https://github.com/emmoop) ([#9951](https://github.com/dbt-labs/dbt-core/issues/9951))
|
|
||||||
- [@heysweet](https://github.com/heysweet) ([#9811](https://github.com/dbt-labs/dbt-core/issues/9811))
|
|
||||||
- [@jx2lee](https://github.com/jx2lee) ([#9319](https://github.com/dbt-labs/dbt-core/issues/9319), [#7761](https://github.com/dbt-labs/dbt-core/issues/7761))
|
|
||||||
- [@l1xnan](https://github.com/l1xnan) ([#9007](https://github.com/dbt-labs/dbt-core/issues/9007))
|
|
||||||
- [@mederka](https://github.com/mederka) ([#6976](https://github.com/dbt-labs/dbt-core/issues/6976))
|
|
||||||
- [@mjkanji](https://github.com/mjkanji) ([#9934](https://github.com/dbt-labs/dbt-core/issues/9934))
|
|
||||||
- [@nielspardon](https://github.com/nielspardon) ([#7247](https://github.com/dbt-labs/dbt-core/issues/7247))
|
|
||||||
- [@niteshy](https://github.com/niteshy) ([#9830](https://github.com/dbt-labs/dbt-core/issues/9830))
|
|
||||||
- [@ofek1weiss](https://github.com/ofek1weiss) ([#5609](https://github.com/dbt-labs/dbt-core/issues/5609))
|
|
||||||
- [@peterallenwebb](https://github.com/peterallenwebb) ([#9112](https://github.com/dbt-labs/dbt-core/issues/9112))
|
|
||||||
- [@rzjfr](https://github.com/rzjfr) ([#9373](https://github.com/dbt-labs/dbt-core/issues/9373))
|
|
||||||
- [@slothkong](https://github.com/slothkong) ([#9570](https://github.com/dbt-labs/dbt-core/issues/9570))
|
|
||||||
- [@tlento](https://github.com/tlento) ([#9012](https://github.com/dbt-labs/dbt-core/issues/9012), [#9671](https://github.com/dbt-labs/dbt-core/issues/9671))
|
|
||||||
- [@tonayya](https://github.com/tonayya) ([#9252](https://github.com/dbt-labs/dbt-core/issues/9252))
|
|
||||||
|
|
||||||
## Previous Releases
|
## Previous Releases
|
||||||
|
|
||||||
For information on prior major and minor releases, see their changelogs:
|
For information on prior major and minor releases, see their changelogs:
|
||||||
|
|
||||||
|
* [1.11](https://github.com/dbt-labs/dbt-core/blob/1.11.latest/CHANGELOG.md)
|
||||||
|
* [1.10](https://github.com/dbt-labs/dbt-core/blob/1.10.latest/CHANGELOG.md)
|
||||||
|
* [1.9](https://github.com/dbt-labs/dbt-core/blob/1.9.latest/CHANGELOG.md)
|
||||||
|
* [1.8](https://github.com/dbt-labs/dbt-core/blob/1.8.latest/CHANGELOG.md)
|
||||||
* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
|
* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
|
||||||
* [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
|
* [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
|
||||||
* [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
|
* [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
|
||||||
|
|||||||
163
CONTRIBUTING.md
163
CONTRIBUTING.md
@@ -2,21 +2,39 @@
|
|||||||
|
|
||||||
`dbt-core` is open source software. It is what it is today because community members have opened issues, provided feedback, and [contributed to the knowledge loop](https://www.getdbt.com/dbt-labs/values/). Whether you are a seasoned open source contributor or a first-time committer, we welcome and encourage you to contribute code, documentation, ideas, or problem statements to this project.
|
`dbt-core` is open source software. It is what it is today because community members have opened issues, provided feedback, and [contributed to the knowledge loop](https://www.getdbt.com/dbt-labs/values/). Whether you are a seasoned open source contributor or a first-time committer, we welcome and encourage you to contribute code, documentation, ideas, or problem statements to this project.
|
||||||
|
|
||||||
1. [About this document](#about-this-document)
|
- [Contributing to `dbt-core`](#contributing-to-dbt-core)
|
||||||
2. [Getting the code](#getting-the-code)
|
- [About this document](#about-this-document)
|
||||||
3. [Setting up an environment](#setting-up-an-environment)
|
- [Notes](#notes)
|
||||||
4. [Running dbt-core in development](#running-dbt-core-in-development)
|
- [Getting the code](#getting-the-code)
|
||||||
5. [Testing dbt-core](#testing)
|
- [Installing git](#installing-git)
|
||||||
6. [Debugging](#debugging)
|
- [External contributors](#external-contributors)
|
||||||
7. [Adding or modifying a changelog entry](#adding-or-modifying-a-changelog-entry)
|
- [dbt Labs contributors](#dbt-labs-contributors)
|
||||||
8. [Submitting a Pull Request](#submitting-a-pull-request)
|
- [Setting up an environment](#setting-up-an-environment)
|
||||||
9. [Troubleshooting Tips](#troubleshooting-tips)
|
- [Tools](#tools)
|
||||||
|
- [Virtual environments](#virtual-environments)
|
||||||
|
- [Docker and `docker-compose`](#docker-and-docker-compose)
|
||||||
|
- [Postgres (optional)](#postgres-optional)
|
||||||
|
- [Running `dbt-core` in development](#running-dbt-core-in-development)
|
||||||
|
- [Installation](#installation)
|
||||||
|
- [Running `dbt-core`](#running-dbt-core)
|
||||||
|
- [Testing](#testing)
|
||||||
|
- [Initial setup](#initial-setup)
|
||||||
|
- [Test commands](#test-commands)
|
||||||
|
- [Hatch scripts](#hatch-scripts)
|
||||||
|
- [`pre-commit`](#pre-commit)
|
||||||
|
- [`pytest`](#pytest)
|
||||||
|
- [Unit, Integration, Functional?](#unit-integration-functional)
|
||||||
|
- [Debugging](#debugging)
|
||||||
|
- [Assorted development tips](#assorted-development-tips)
|
||||||
|
- [Adding or modifying a CHANGELOG Entry](#adding-or-modifying-a-changelog-entry)
|
||||||
|
- [Submitting a Pull Request](#submitting-a-pull-request)
|
||||||
|
- [Troubleshooting Tips](#troubleshooting-tips)
|
||||||
|
|
||||||
## About this document
|
## About this document
|
||||||
|
|
||||||
There are many ways to contribute to the ongoing development of `dbt-core`, such as by participating in discussions and issues. We encourage you to first read our higher-level document: ["Expectations for Open Source Contributors"](https://docs.getdbt.com/docs/contributing/oss-expectations).
|
There are many ways to contribute to the ongoing development of `dbt-core`, such as by participating in discussions and issues. We encourage you to first read our higher-level document: ["Expectations for Open Source Contributors"](https://docs.getdbt.com/docs/contributing/oss-expectations).
|
||||||
|
|
||||||
The rest of this document serves as a more granular guide for contributing code changes to `dbt-core` (this repository). It is not intended as a guide for using `dbt-core`, and some pieces assume a level of familiarity with Python development (virtualenvs, `pip`, etc). Specific code snippets in this guide assume you are using macOS or Linux and are comfortable with the command line.
|
The rest of this document serves as a more granular guide for contributing code changes to `dbt-core` (this repository). It is not intended as a guide for using `dbt-core`, and some pieces assume a level of familiarity with Python development and package managers. Specific code snippets in this guide assume you are using macOS or Linux and are comfortable with the command line.
|
||||||
|
|
||||||
If you get stuck, we're happy to help! Drop us a line in the `#dbt-core-development` channel in the [dbt Community Slack](https://community.getdbt.com).
|
If you get stuck, we're happy to help! Drop us a line in the `#dbt-core-development` channel in the [dbt Community Slack](https://community.getdbt.com).
|
||||||
|
|
||||||
@@ -55,28 +73,22 @@ There are some tools that will be helpful to you in developing locally. While th
|
|||||||
|
|
||||||
These are the tools used in `dbt-core` development and testing:
|
These are the tools used in `dbt-core` development and testing:
|
||||||
|
|
||||||
- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.8, 3.9, 3.10 and 3.11
|
- [`hatch`](https://hatch.pypa.io/) for build backend, environment management, and running tests across Python versions (3.10, 3.11, 3.12, and 3.13)
|
||||||
- [`pytest`](https://docs.pytest.org/en/latest/) to define, discover, and run tests
|
- [`pytest`](https://docs.pytest.org/en/latest/) to define, discover, and run tests
|
||||||
- [`flake8`](https://flake8.pycqa.org/en/latest/) for code linting
|
- [`flake8`](https://flake8.pycqa.org/en/latest/) for code linting
|
||||||
- [`black`](https://github.com/psf/black) for code formatting
|
- [`black`](https://github.com/psf/black) for code formatting
|
||||||
- [`mypy`](https://mypy.readthedocs.io/en/stable/) for static type checking
|
- [`mypy`](https://mypy.readthedocs.io/en/stable/) for static type checking
|
||||||
- [`pre-commit`](https://pre-commit.com) to easily run those checks
|
- [`pre-commit`](https://pre-commit.com) to easily run those checks
|
||||||
- [`changie`](https://changie.dev/) to create changelog entries, without merge conflicts
|
- [`changie`](https://changie.dev/) to create changelog entries, without merge conflicts
|
||||||
- [`make`](https://users.cs.duke.edu/~ola/courses/programming/Makefiles/Makefiles.html) to run multiple setup or test steps in combination. Don't worry too much, nobody _really_ understands how `make` works, and our Makefile aims to be super simple.
|
|
||||||
- [GitHub Actions](https://github.com/features/actions) for automating tests and checks, once a PR is pushed to the `dbt-core` repository
|
- [GitHub Actions](https://github.com/features/actions) for automating tests and checks, once a PR is pushed to the `dbt-core` repository
|
||||||
|
|
||||||
A deep understanding of these tools is not required to effectively contribute to `dbt-core`, but we recommend checking out the attached documentation if you're interested in learning more about each one.
|
A deep understanding of these tools is not required to effectively contribute to `dbt-core`, but we recommend checking out the attached documentation if you're interested in learning more about each one.
|
||||||
|
|
||||||
#### Virtual environments
|
#### Virtual environments
|
||||||
|
|
||||||
We strongly recommend using virtual environments when developing code in `dbt-core`. We recommend creating this virtualenv
|
dbt-core uses [Hatch](https://hatch.pypa.io/) for dependency and environment management. Hatch automatically creates and manages isolated environments for development, testing, and building, so you don't need to manually create virtual environments.
|
||||||
in the root of the `dbt-core` repository. To create a new virtualenv, run:
|
|
||||||
```sh
|
|
||||||
python3 -m venv env
|
|
||||||
source env/bin/activate
|
|
||||||
```
|
|
||||||
|
|
||||||
This will create and activate a new Python virtual environment.
|
For more information on how Hatch manages environments, see the [Hatch environment documentation](https://hatch.pypa.io/latest/environment/).
|
||||||
|
|
||||||
#### Docker and `docker-compose`
|
#### Docker and `docker-compose`
|
||||||
|
|
||||||
@@ -95,22 +107,42 @@ brew install postgresql
|
|||||||
|
|
||||||
### Installation
|
### Installation
|
||||||
|
|
||||||
First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt-core` (and its dependencies):
|
First make sure you have Python 3.10 or later installed. Ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `hatch`. Finally set up `dbt-core` for development:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
make dev
|
cd core
|
||||||
|
hatch run setup
|
||||||
```
|
```
|
||||||
or, alternatively:
|
|
||||||
|
This will install all development dependencies and set up pre-commit hooks.
|
||||||
|
|
||||||
|
By default, hatch will use whatever Python version is active in your environment. To specify a particular Python version, set the `HATCH_PYTHON` environment variable:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
pip install -r dev-requirements.txt -r editable-requirements.txt
|
export HATCH_PYTHON=3.12
|
||||||
pre-commit install
|
hatch env create
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Or add it to your shell profile (e.g., `~/.zshrc` or `~/.bashrc`) for persistence.
|
||||||
|
|
||||||
When installed in this way, any changes you make to your local copy of the source code will be reflected immediately in your next `dbt` run.
|
When installed in this way, any changes you make to your local copy of the source code will be reflected immediately in your next `dbt` run.
|
||||||
|
|
||||||
|
#### Building dbt-core
|
||||||
|
|
||||||
|
dbt-core uses [Hatch](https://hatch.pypa.io/) (specifically `hatchling`) as its build backend. To build distribution packages:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
cd core
|
||||||
|
hatch build
|
||||||
|
```
|
||||||
|
|
||||||
|
This will create both wheel (`.whl`) and source distribution (`.tar.gz`) files in the `dist/` directory.
|
||||||
|
|
||||||
|
The build configuration is defined in `core/pyproject.toml`. You can also use the standard `python -m build` command if you prefer.
|
||||||
|
|
||||||
### Running `dbt-core`
|
### Running `dbt-core`
|
||||||
|
|
||||||
With your virtualenv activated, the `dbt` script should point back to the source code you've cloned on your machine. You can verify this by running `which dbt`. This command should show you a path to an executable in your virtualenv.
|
Once you've run `hatch run setup`, the `dbt` command will be available in your PATH. You can verify this by running `which dbt`.
|
||||||
|
|
||||||
Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as necessary to connect to your target databases. It may be a good idea to add a new profile pointing to a local Postgres instance, or a specific test sandbox within your data warehouse if appropriate. Make sure to create a profile before running integration tests.
|
Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as necessary to connect to your target databases. It may be a good idea to add a new profile pointing to a local Postgres instance, or a specific test sandbox within your data warehouse if appropriate. Make sure to create a profile before running integration tests.
|
||||||
|
|
||||||
@@ -128,51 +160,84 @@ Although `dbt-core` works with a number of different databases, you won't need t
|
|||||||
Postgres offers the easiest way to test most `dbt-core` functionality today. These tests are the fastest to run, and the easiest to set up. To run the Postgres integration tests, you'll have to do one extra step of setting up the test database:
|
Postgres offers the easiest way to test most `dbt-core` functionality today. These tests are the fastest to run, and the easiest to set up. To run the Postgres integration tests, you'll have to do one extra step of setting up the test database:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
make setup-db
|
cd core
|
||||||
|
hatch run setup-db
|
||||||
```
|
```
|
||||||
or, alternatively:
|
|
||||||
|
Alternatively, you can run the setup commands directly:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
docker-compose up -d database
|
docker-compose up -d database
|
||||||
PGHOST=localhost PGUSER=root PGPASSWORD=password PGDATABASE=postgres bash test/setup_db.sh
|
PGHOST=localhost PGUSER=root PGPASSWORD=password PGDATABASE=postgres bash scripts/setup_db.sh
|
||||||
```
|
```
|
||||||
|
|
||||||
### Test commands
|
### Test commands
|
||||||
|
|
||||||
There are a few methods for running tests locally.
|
There are a few methods for running tests locally.
|
||||||
|
|
||||||
#### Makefile
|
#### Hatch scripts
|
||||||
|
|
||||||
There are multiple targets in the Makefile to run common test suites and code
|
The primary way to run tests and checks is using hatch scripts (defined in `core/hatch.toml`):
|
||||||
checks, most notably:
|
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
# Runs unit tests with py38 and code checks in parallel.
|
cd core
|
||||||
make test
|
|
||||||
# Runs postgres integration tests with py38 in "fail fast" mode.
|
|
||||||
make integration
|
|
||||||
```
|
|
||||||
> These make targets assume you have a local installation of a recent version of [`tox`](https://tox.readthedocs.io/en/latest/) for unit/integration testing and pre-commit for code quality checks,
|
|
||||||
> unless you use choose a Docker container to run tests. Run `make help` for more info.
|
|
||||||
|
|
||||||
Check out the other targets in the Makefile to see other commonly used test
|
# Run all unit tests
|
||||||
suites.
|
hatch run unit-tests
|
||||||
|
|
||||||
|
# Run unit tests and all code quality checks
|
||||||
|
hatch run test
|
||||||
|
|
||||||
|
# Run integration tests
|
||||||
|
hatch run integration-tests
|
||||||
|
|
||||||
|
# Run integration tests in fail-fast mode
|
||||||
|
hatch run integration-tests-fail-fast
|
||||||
|
|
||||||
|
# Run linting checks only
|
||||||
|
hatch run lint
|
||||||
|
hatch run flake8
|
||||||
|
hatch run mypy
|
||||||
|
hatch run black
|
||||||
|
|
||||||
|
# Run all pre-commit hooks
|
||||||
|
hatch run code-quality
|
||||||
|
|
||||||
|
# Clean build artifacts
|
||||||
|
hatch run clean
|
||||||
|
```
|
||||||
|
|
||||||
|
Hatch manages isolated environments and dependencies automatically. The commands above use the `default` environment which is recommended for most local development.
|
||||||
|
|
||||||
|
**Using the `ci` environment (optional)**
|
||||||
|
|
||||||
|
If you need to replicate exactly what runs in GitHub Actions (e.g., with coverage reporting), use the `ci` environment:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
cd core
|
||||||
|
|
||||||
|
# Run unit tests with coverage
|
||||||
|
hatch run ci:unit-tests
|
||||||
|
|
||||||
|
# Run unit tests with a specific Python version
|
||||||
|
hatch run +py=3.11 ci:unit-tests
|
||||||
|
```
|
||||||
|
|
||||||
|
> **Note:** Most developers should use the default environment (`hatch run unit-tests`). The `ci` environment is primarily for debugging CI failures or running tests with coverage.
|
||||||
|
|
||||||
#### `pre-commit`
|
#### `pre-commit`
|
||||||
[`pre-commit`](https://pre-commit.com) takes care of running all code-checks for formatting and linting. Run `make dev` to install `pre-commit` in your local environment (we recommend running this command with a python virtual environment active). This command installs several pip executables including black, mypy, and flake8. Once this is done you can use any of the linter-based make targets as well as a git pre-commit hook that will ensure proper formatting and linting.
|
|
||||||
|
|
||||||
#### `tox`
|
[`pre-commit`](https://pre-commit.com) takes care of running all code-checks for formatting and linting. Run `hatch run setup` to install `pre-commit` in your local environment (we recommend running this command with a python virtual environment active). This installs several pip executables including black, mypy, and flake8. Once installed, hooks will run automatically on `git commit`, or you can run them manually with `hatch run code-quality`.
|
||||||
|
|
||||||
[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and install dependencies in order to run tests. You can also run tests in parallel, for example, you can run unit tests for Python 3.8, Python 3.9, Python 3.10 and Python 3.11 checks in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py38`. The configuration for these tests in located in `tox.ini`.
|
|
||||||
|
|
||||||
#### `pytest`
|
#### `pytest`
|
||||||
|
|
||||||
Finally, you can also run a specific test or group of tests using [`pytest`](https://docs.pytest.org/en/latest/) directly. With a virtualenv active and dev dependencies installed you can do things like:
|
Finally, you can also run a specific test or group of tests using [`pytest`](https://docs.pytest.org/en/latest/) directly. After running `hatch run setup`, you can run pytest commands like:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
# run all unit tests in a file
|
# run all unit tests in a file
|
||||||
python3 -m pytest tests/unit/test_base_column.py
|
python3 -m pytest tests/unit/test_invocation_id.py
|
||||||
# run a specific unit test
|
# run a specific unit test
|
||||||
python3 -m pytest tests/unit/test_base_column.py::TestNumericType::test__numeric_type
|
python3 -m pytest tests/unit/test_invocation_id.py::TestInvocationId::test_invocation_id
|
||||||
# run specific Postgres functional tests
|
# run specific Postgres functional tests
|
||||||
python3 -m pytest tests/functional/sources
|
python3 -m pytest tests/functional/sources
|
||||||
```
|
```
|
||||||
@@ -224,7 +289,9 @@ Code can be merged into the current development branch `main` by opening a pull
|
|||||||
|
|
||||||
Automated tests run via GitHub Actions. If you're a first-time contributor, all tests (including code checks and unit tests) will require a maintainer to approve. Changes in the `dbt-core` repository trigger integration tests against Postgres. dbt Labs also provides CI environments in which to test changes to other adapters, triggered by PRs in those adapters' repositories, as well as periodic maintenance checks of each adapter in concert with the latest `dbt-core` code changes.
|
Automated tests run via GitHub Actions. If you're a first-time contributor, all tests (including code checks and unit tests) will require a maintainer to approve. Changes in the `dbt-core` repository trigger integration tests against Postgres. dbt Labs also provides CI environments in which to test changes to other adapters, triggered by PRs in those adapters' repositories, as well as periodic maintenance checks of each adapter in concert with the latest `dbt-core` code changes.
|
||||||
|
|
||||||
Once all tests are passing and your PR has been approved, a `dbt-core` maintainer will merge your changes into the active development branch. And that's it! Happy developing :tada:
|
We require signed git commits. See docs [here](https://docs.github.com/en/authentication/managing-commit-signature-verification/signing-commits) for setting up code signing.
|
||||||
|
|
||||||
|
Once all tests are passing, all comments are resolved, and your PR has been approved, a `dbt-core` maintainer will merge your changes into the active development branch. And that's it! Happy developing :tada:
|
||||||
|
|
||||||
## Troubleshooting Tips
|
## Troubleshooting Tips
|
||||||
|
|
||||||
|
|||||||
@@ -33,9 +33,6 @@ RUN apt-get update \
|
|||||||
python-is-python3 \
|
python-is-python3 \
|
||||||
python-dev-is-python3 \
|
python-dev-is-python3 \
|
||||||
python3-pip \
|
python3-pip \
|
||||||
python3.9 \
|
|
||||||
python3.9-dev \
|
|
||||||
python3.9-venv \
|
|
||||||
python3.10 \
|
python3.10 \
|
||||||
python3.10-dev \
|
python3.10-dev \
|
||||||
python3.10-venv \
|
python3.10-venv \
|
||||||
@@ -50,7 +47,7 @@ RUN curl -LO https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_V
|
|||||||
&& tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
|
&& tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
|
||||||
&& rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz
|
&& rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz
|
||||||
|
|
||||||
RUN pip3 install -U tox wheel six setuptools
|
RUN pip3 install -U hatch wheel pre-commit
|
||||||
|
|
||||||
# These args are passed in via docker-compose, which reads them from the .env file.
|
# These args are passed in via docker-compose, which reads them from the .env file.
|
||||||
# On Linux, run `make .env` to create the .env file for the current user.
|
# On Linux, run `make .env` to create the .env file for the current user.
|
||||||
@@ -65,7 +62,6 @@ RUN if [ ${USER_ID:-0} -ne 0 ] && [ ${GROUP_ID:-0} -ne 0 ]; then \
|
|||||||
useradd -mU -l dbt_test_user; \
|
useradd -mU -l dbt_test_user; \
|
||||||
fi
|
fi
|
||||||
RUN mkdir /usr/app && chown dbt_test_user /usr/app
|
RUN mkdir /usr/app && chown dbt_test_user /usr/app
|
||||||
RUN mkdir /home/tox && chown dbt_test_user /home/tox
|
|
||||||
|
|
||||||
WORKDIR /usr/app
|
WORKDIR /usr/app
|
||||||
VOLUME /usr/app
|
VOLUME /usr/app
|
||||||
|
|||||||
163
Makefile
163
Makefile
@@ -1,146 +1,95 @@
|
|||||||
|
# ============================================================================
|
||||||
|
# DEPRECATED: This Makefile is maintained for backwards compatibility only.
|
||||||
|
#
|
||||||
|
# dbt-core now uses Hatch for task management and development workflows.
|
||||||
|
# Please migrate to using hatch commands directly:
|
||||||
|
#
|
||||||
|
# make dev → cd core && hatch run setup
|
||||||
|
# make unit → cd core && hatch run unit-tests
|
||||||
|
# make test → cd core && hatch run test
|
||||||
|
# make integration → cd core && hatch run integration-tests
|
||||||
|
# make lint → cd core && hatch run lint
|
||||||
|
# make code_quality → cd core && hatch run code-quality
|
||||||
|
# make setup-db → cd core && hatch run setup-db
|
||||||
|
# make clean → cd core && hatch run clean
|
||||||
|
#
|
||||||
|
# See core/pyproject.toml [tool.hatch.envs.default.scripts] for all available
|
||||||
|
# commands and CONTRIBUTING.md for detailed usage instructions.
|
||||||
|
#
|
||||||
|
# This Makefile will be removed in a future version of dbt-core.
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
.DEFAULT_GOAL:=help
|
.DEFAULT_GOAL:=help
|
||||||
|
|
||||||
# Optional flag to run target in a docker container.
|
|
||||||
# (example `make test USE_DOCKER=true`)
|
|
||||||
ifeq ($(USE_DOCKER),true)
|
|
||||||
DOCKER_CMD := docker-compose run --rm test
|
|
||||||
endif
|
|
||||||
|
|
||||||
#
|
|
||||||
# To override CI_flags, create a file at this repo's root dir named `makefile.test.env`. Fill it
|
|
||||||
# with any ENV_VAR overrides required by your test environment, e.g.
|
|
||||||
# DBT_TEST_USER_1=user
|
|
||||||
# LOG_DIR="dir with a space in it"
|
|
||||||
#
|
|
||||||
# Warn: Restrict each line to one variable only.
|
|
||||||
#
|
|
||||||
ifeq (./makefile.test.env,$(wildcard ./makefile.test.env))
|
|
||||||
include ./makefile.test.env
|
|
||||||
endif
|
|
||||||
|
|
||||||
CI_FLAGS =\
|
|
||||||
DBT_TEST_USER_1=$(if $(DBT_TEST_USER_1),$(DBT_TEST_USER_1),dbt_test_user_1)\
|
|
||||||
DBT_TEST_USER_2=$(if $(DBT_TEST_USER_2),$(DBT_TEST_USER_2),dbt_test_user_2)\
|
|
||||||
DBT_TEST_USER_3=$(if $(DBT_TEST_USER_3),$(DBT_TEST_USER_3),dbt_test_user_3)\
|
|
||||||
RUSTFLAGS=$(if $(RUSTFLAGS),$(RUSTFLAGS),"-D warnings")\
|
|
||||||
LOG_DIR=$(if $(LOG_DIR),$(LOG_DIR),./logs)\
|
|
||||||
DBT_LOG_FORMAT=$(if $(DBT_LOG_FORMAT),$(DBT_LOG_FORMAT),json)
|
|
||||||
|
|
||||||
|
|
||||||
.PHONY: dev_req
|
.PHONY: dev_req
|
||||||
dev_req: ## Installs dbt-* packages in develop mode along with only development dependencies.
|
dev_req: ## Installs dbt-* packages in develop mode along with only development dependencies.
|
||||||
@\
|
@cd core && hatch run dev-req
|
||||||
pip install -r dev-requirements.txt -r editable-requirements.txt
|
|
||||||
|
|
||||||
.PHONY: dev
|
.PHONY: dev
|
||||||
dev: dev_req ## Installs dbt-* packages in develop mode along with development dependencies and pre-commit.
|
dev: ## Installs dbt-* packages in develop mode along with development dependencies and pre-commit.
|
||||||
@\
|
@cd core && hatch run setup
|
||||||
pre-commit install
|
|
||||||
|
|
||||||
.PHONY: dev-uninstall
|
.PHONY: dev-uninstall
|
||||||
dev-uninstall: ## Uninstall all packages in venv except for build tools
|
dev-uninstall: ## Uninstall all packages in venv except for build tools
|
||||||
@\
|
@pip freeze | grep -v "^-e" | cut -d "@" -f1 | xargs pip uninstall -y; \
|
||||||
pip freeze | grep -v "^-e" | cut -d "@" -f1 | xargs pip uninstall -y; \
|
pip uninstall -y dbt-core
|
||||||
pip uninstall -y dbt-core
|
|
||||||
|
|
||||||
.PHONY: core_proto_types
|
|
||||||
core_proto_types: ## generates google protobuf python file from core_types.proto
|
|
||||||
protoc -I=./core/dbt/events --python_out=./core/dbt/events ./core/dbt/events/core_types.proto
|
|
||||||
|
|
||||||
.PHONY: mypy
|
.PHONY: mypy
|
||||||
mypy: .env ## Runs mypy against staged changes for static type checking.
|
mypy: ## Runs mypy against staged changes for static type checking.
|
||||||
@\
|
@cd core && hatch run mypy
|
||||||
$(DOCKER_CMD) pre-commit run --hook-stage manual mypy-check | grep -v "INFO"
|
|
||||||
|
|
||||||
.PHONY: flake8
|
.PHONY: flake8
|
||||||
flake8: .env ## Runs flake8 against staged changes to enforce style guide.
|
flake8: ## Runs flake8 against staged changes to enforce style guide.
|
||||||
@\
|
@cd core && hatch run flake8
|
||||||
$(DOCKER_CMD) pre-commit run --hook-stage manual flake8-check | grep -v "INFO"
|
|
||||||
|
|
||||||
.PHONY: black
|
.PHONY: black
|
||||||
black: .env ## Runs black against staged changes to enforce style guide.
|
black: ## Runs black against staged changes to enforce style guide.
|
||||||
@\
|
@cd core && hatch run black
|
||||||
$(DOCKER_CMD) pre-commit run --hook-stage manual black-check -v | grep -v "INFO"
|
|
||||||
|
|
||||||
.PHONY: lint
|
.PHONY: lint
|
||||||
lint: .env ## Runs flake8 and mypy code checks against staged changes.
|
lint: ## Runs flake8 and mypy code checks against staged changes.
|
||||||
@\
|
@cd core && hatch run lint
|
||||||
$(DOCKER_CMD) pre-commit run flake8-check --hook-stage manual | grep -v "INFO"; \
|
|
||||||
$(DOCKER_CMD) pre-commit run mypy-check --hook-stage manual | grep -v "INFO"
|
.PHONY: code_quality
|
||||||
|
code_quality: ## Runs all pre-commit hooks against all files.
|
||||||
|
@cd core && hatch run code-quality
|
||||||
|
|
||||||
.PHONY: unit
|
.PHONY: unit
|
||||||
unit: .env ## Runs unit tests with py
|
unit: ## Runs unit tests with py
|
||||||
@\
|
@cd core && hatch run unit-tests
|
||||||
$(DOCKER_CMD) tox -e py
|
|
||||||
|
|
||||||
.PHONY: test
|
.PHONY: test
|
||||||
test: .env ## Runs unit tests with py and code checks against staged changes.
|
test: ## Runs unit tests with py and code checks against staged changes.
|
||||||
@\
|
@cd core && hatch run test
|
||||||
$(DOCKER_CMD) tox -e py; \
|
|
||||||
$(DOCKER_CMD) pre-commit run black-check --hook-stage manual | grep -v "INFO"; \
|
|
||||||
$(DOCKER_CMD) pre-commit run flake8-check --hook-stage manual | grep -v "INFO"; \
|
|
||||||
$(DOCKER_CMD) pre-commit run mypy-check --hook-stage manual | grep -v "INFO"
|
|
||||||
|
|
||||||
.PHONY: integration
|
.PHONY: integration
|
||||||
integration: .env ## Runs core integration tests using postgres with py-integration
|
integration: ## Runs core integration tests using postgres with py-integration
|
||||||
@\
|
@cd core && hatch run integration-tests
|
||||||
$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto
|
|
||||||
|
|
||||||
.PHONY: integration-fail-fast
|
.PHONY: integration-fail-fast
|
||||||
integration-fail-fast: .env ## Runs core integration tests using postgres with py-integration in "fail fast" mode.
|
integration-fail-fast: ## Runs core integration tests using postgres with py-integration in "fail fast" mode.
|
||||||
@\
|
@cd core && hatch run integration-tests-fail-fast
|
||||||
$(DOCKER_CMD) tox -e py-integration -- -x -nauto
|
|
||||||
|
|
||||||
.PHONY: interop
|
|
||||||
interop: clean
|
|
||||||
@\
|
|
||||||
mkdir $(LOG_DIR) && \
|
|
||||||
$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto && \
|
|
||||||
LOG_DIR=$(LOG_DIR) cargo run --manifest-path test/interop/log_parsing/Cargo.toml
|
|
||||||
|
|
||||||
.PHONY: setup-db
|
.PHONY: setup-db
|
||||||
setup-db: ## Setup Postgres database with docker-compose for system testing.
|
setup-db: ## Setup Postgres database with docker-compose for system testing.
|
||||||
@\
|
@cd core && hatch run setup-db
|
||||||
docker-compose up -d database && \
|
|
||||||
PGHOST=localhost PGUSER=root PGPASSWORD=password PGDATABASE=postgres bash test/setup_db.sh
|
|
||||||
|
|
||||||
# This rule creates a file named .env that is used by docker-compose for passing
|
|
||||||
# the USER_ID and GROUP_ID arguments to the Docker image.
|
|
||||||
.env: ## Setup step for using using docker-compose with make target.
|
|
||||||
@touch .env
|
|
||||||
ifneq ($(OS),Windows_NT)
|
|
||||||
ifneq ($(shell uname -s), Darwin)
|
|
||||||
@echo USER_ID=$(shell id -u) > .env
|
|
||||||
@echo GROUP_ID=$(shell id -g) >> .env
|
|
||||||
endif
|
|
||||||
endif
|
|
||||||
|
|
||||||
.PHONY: clean
|
.PHONY: clean
|
||||||
clean: ## Resets development environment.
|
clean: ## Resets development environment.
|
||||||
@echo 'cleaning repo...'
|
@cd core && hatch run clean
|
||||||
@rm -f .coverage
|
|
||||||
@rm -f .coverage.*
|
|
||||||
@rm -rf .eggs/
|
|
||||||
@rm -f .env
|
|
||||||
@rm -rf .tox/
|
|
||||||
@rm -rf build/
|
|
||||||
@rm -rf dbt.egg-info/
|
|
||||||
@rm -f dbt_project.yml
|
|
||||||
@rm -rf dist/
|
|
||||||
@rm -f htmlcov/*.{css,html,js,json,png}
|
|
||||||
@rm -rf logs/
|
|
||||||
@rm -rf target/
|
|
||||||
@find . -type f -name '*.pyc' -delete
|
|
||||||
@find . -type d -name '__pycache__' -depth -delete
|
|
||||||
@echo 'done.'
|
|
||||||
|
|
||||||
|
.PHONY: json_schema
|
||||||
|
json_schema: ## Update generated JSON schema using code changes.
|
||||||
|
@cd core && hatch run json-schema
|
||||||
|
|
||||||
.PHONY: help
|
.PHONY: help
|
||||||
help: ## Show this help message.
|
help: ## Show this help message.
|
||||||
@echo 'usage: make [target] [USE_DOCKER=true]'
|
@echo 'usage: make [target]'
|
||||||
|
@echo
|
||||||
|
@echo 'DEPRECATED: This Makefile is a compatibility shim.'
|
||||||
|
@echo 'Please use "cd core && hatch run <command>" directly.'
|
||||||
@echo
|
@echo
|
||||||
@echo 'targets:'
|
@echo 'targets:'
|
||||||
@grep -E '^[8+a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
|
@grep -E '^[8+a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
|
||||||
@echo
|
@echo
|
||||||
@echo 'options:'
|
@echo 'For more information, see CONTRIBUTING.md'
|
||||||
@echo 'use USE_DOCKER=true to run target in a docker container'
|
|
||||||
|
|||||||
@@ -5,6 +5,7 @@
|
|||||||
<a href="https://github.com/dbt-labs/dbt-core/actions/workflows/main.yml">
|
<a href="https://github.com/dbt-labs/dbt-core/actions/workflows/main.yml">
|
||||||
<img src="https://github.com/dbt-labs/dbt-core/actions/workflows/main.yml/badge.svg?event=push" alt="CI Badge"/>
|
<img src="https://github.com/dbt-labs/dbt-core/actions/workflows/main.yml/badge.svg?event=push" alt="CI Badge"/>
|
||||||
</a>
|
</a>
|
||||||
|
<a href="https://www.bestpractices.dev/projects/11095"><img src="https://www.bestpractices.dev/projects/11095/badge"></a>
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
**[dbt](https://www.getdbt.com/)** enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications.
|
**[dbt](https://www.getdbt.com/)** enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications.
|
||||||
|
|||||||
1
SECURITY.md
Normal file
1
SECURITY.md
Normal file
@@ -0,0 +1 @@
|
|||||||
|
[About dbt Core versions](https://docs.getdbt.com/docs/dbt-versions/core)
|
||||||
27
codecov.yml
27
codecov.yml
@@ -1,13 +1,24 @@
|
|||||||
ignore:
|
ignore:
|
||||||
- ".github"
|
- ".github"
|
||||||
- ".changes"
|
- ".changes"
|
||||||
|
|
||||||
|
# Disable all status checks to prevent red X's in CI
|
||||||
|
# Coverage data is still uploaded and PR comments are still posted
|
||||||
coverage:
|
coverage:
|
||||||
status:
|
status:
|
||||||
project:
|
project: off
|
||||||
default:
|
patch: off
|
||||||
target: auto
|
|
||||||
threshold: 0.1% # Reduce noise by ignoring rounding errors in coverage drops
|
comment:
|
||||||
patch:
|
layout: "header, diff, flags, components" # show component info in the PR comment
|
||||||
default:
|
|
||||||
target: auto
|
component_management:
|
||||||
threshold: 80%
|
individual_components:
|
||||||
|
- component_id: unittests
|
||||||
|
name: "Unit Tests"
|
||||||
|
flag_regexes:
|
||||||
|
- "unit"
|
||||||
|
- component_id: integrationtests
|
||||||
|
name: "Integration Tests"
|
||||||
|
flag_regexes:
|
||||||
|
- "integration"
|
||||||
|
|||||||
@@ -1,3 +0,0 @@
|
|||||||
recursive-include dbt/include *.py *.sql *.yml *.html *.md .gitkeep .gitignore
|
|
||||||
include dbt/py.typed
|
|
||||||
recursive-include dbt/task/docs *.html
|
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
<p align="center">
|
<p align="center">
|
||||||
<img src="https://raw.githubusercontent.com/dbt-labs/dbt-core/fa1ea14ddfb1d5ae319d5141844910dd53ab2834/etc/dbt-core.svg" alt="dbt logo" width="750"/>
|
<img src="https://raw.githubusercontent.com/dbt-labs/dbt-core/fa1ea14ddfb1d5ae319d5141844910dd53ab2834/docs/images/dbt-core.svg" alt="dbt logo" width="750"/>
|
||||||
</p>
|
</p>
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<a href="https://github.com/dbt-labs/dbt-core/actions/workflows/main.yml">
|
<a href="https://github.com/dbt-labs/dbt-core/actions/workflows/main.yml">
|
||||||
@@ -9,7 +9,7 @@
|
|||||||
|
|
||||||
**[dbt](https://www.getdbt.com/)** enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications.
|
**[dbt](https://www.getdbt.com/)** enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications.
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
## Understanding dbt
|
## Understanding dbt
|
||||||
|
|
||||||
@@ -17,7 +17,7 @@ Analysts using dbt can transform their data by simply writing select statements,
|
|||||||
|
|
||||||
These select statements, or "models", form a dbt project. Models frequently build on top of one another – dbt makes it easy to [manage relationships](https://docs.getdbt.com/docs/ref) between models, and [visualize these relationships](https://docs.getdbt.com/docs/documentation), as well as assure the quality of your transformations through [testing](https://docs.getdbt.com/docs/testing).
|
These select statements, or "models", form a dbt project. Models frequently build on top of one another – dbt makes it easy to [manage relationships](https://docs.getdbt.com/docs/ref) between models, and [visualize these relationships](https://docs.getdbt.com/docs/documentation), as well as assure the quality of your transformations through [testing](https://docs.getdbt.com/docs/testing).
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
## Getting started
|
## Getting started
|
||||||
|
|
||||||
|
|||||||
@@ -22,8 +22,6 @@
|
|||||||
|
|
||||||
### links.py
|
### links.py
|
||||||
|
|
||||||
### logger.py
|
|
||||||
|
|
||||||
### main.py
|
### main.py
|
||||||
|
|
||||||
### node_types.py
|
### node_types.py
|
||||||
|
|||||||
1
core/dbt/__version__.py
Normal file
1
core/dbt/__version__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
version = "1.12.0a1"
|
||||||
26
core/dbt/_pydantic_shim.py
Normal file
26
core/dbt/_pydantic_shim.py
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
# type: ignore
|
||||||
|
|
||||||
|
"""Shim to allow support for both Pydantic 1 and Pydantic 2.
|
||||||
|
|
||||||
|
dbt-core must support both major versions of Pydantic because dbt-core users might be using an environment with
|
||||||
|
either version, and we can't restrict them to one or the other. Here, we essentially import all Pydantic objects
|
||||||
|
from version 1 that we use. Throughout the repo, we import these objects from this file instead of from Pydantic
|
||||||
|
directly, meaning that we essentially only use Pydantic 1 in dbt-core currently, but without forcing that restriction
|
||||||
|
on dbt users. The development environment for this repo should be pinned to Pydantic 1 to ensure devs get appropriate
|
||||||
|
type hints.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from importlib.metadata import version
|
||||||
|
|
||||||
|
pydantic_version = version("pydantic")
|
||||||
|
# Pydantic uses semantic versioning, i.e. <major>.<minor>.<patch>, and we need to know the major
|
||||||
|
pydantic_major = pydantic_version.split(".")[0]
|
||||||
|
|
||||||
|
if pydantic_major == "1":
|
||||||
|
from pydantic import BaseSettings # noqa: F401
|
||||||
|
elif pydantic_major == "2":
|
||||||
|
from pydantic.v1 import BaseSettings # noqa: F401
|
||||||
|
else:
|
||||||
|
raise RuntimeError(
|
||||||
|
f"Currently only pydantic 1 and 2 are supported, found pydantic {pydantic_version}"
|
||||||
|
)
|
||||||
@@ -1,24 +1,23 @@
|
|||||||
# dbt/artifacts
|
# dbt/artifacts
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
This directory is meant to be a lightweight module that is independent (and upstream of) the rest of dbt-core internals.
|
This directory is meant to be a lightweight module that is independent (and upstream of) the rest of `dbt-core` internals.
|
||||||
|
|
||||||
It's primary responsibility is to define simple data classes that represent the versioned artifact schemas that dbt writes as JSON files throughout execution.
|
Its primary responsibility is to define simple data classes that represent the versioned artifact schemas that dbt writes as JSON files throughout execution.
|
||||||
|
|
||||||
Long term, this module may be released as a standalone package (e.g. dbt-artifacts) to support stable parsing dbt artifacts programmatically.
|
Eventually, this module may be released as a standalone package (e.g. `dbt-artifacts`) to support stable programmatic parsing of dbt artifacts.
|
||||||
|
|
||||||
`dbt/artifacts` is organized into artifact 'schemas' and 'resources'. Schemas represent the final serialized artifact object, while resources represent sub-components of the larger artifact schemas.
|
`dbt/artifacts` is organized into artifact 'schemas' and 'resources'. Schemas represent the final serialized artifact objects, while resources represent smaller components within those schemas.
|
||||||
|
|
||||||
### dbt/artifacts/schemas
|
### dbt/artifacts/schemas
|
||||||
|
|
||||||
|
Each major version of a schema under `dbt/artifacts/schema` is defined in its corresponding `dbt/artifacts/schema/<artifact-name>/v<version>` directory. Before `dbt/artifacts` artifact schemas were always modified in-place, which is why older artifacts are those missing class definitions.
|
||||||
Each major version of a schema under `dbt/artifacts/schema` is defined in its corresponding `dbt/artifacts/schema/<artifact-name>/v<version>` directory. Before `dbt/artifacts` artifact schemas were always modified in-place, which is why artifacts are missing class definitions for historical versions.
|
|
||||||
|
|
||||||
Currently, there are four artifact schemas defined in `dbt/artifact/schemas`:
|
Currently, there are four artifact schemas defined in `dbt/artifact/schemas`:
|
||||||
|
|
||||||
| Artifact name | File | Class | Latest definition |
|
| Artifact name | File | Class | Latest definition |
|
||||||
|---------------|------------------|----------------------------------|-----------------------------------|
|
|---------------|------------------|----------------------------------|-----------------------------------|
|
||||||
| manifest | manifest.json | WritableManifest | dbt/artifacts/schema/manifest/v11 |
|
| manifest | manifest.json | WritableManifest | dbt/artifacts/schema/manifest/v12 |
|
||||||
| catalog | catalog.json | CatalogArtifact | dbt/artifacts/schema/catalog/v1 |
|
| catalog | catalog.json | CatalogArtifact | dbt/artifacts/schema/catalog/v1 |
|
||||||
| run | run_results.json | RunResultsArtifact | dbt/artifacts/schema/run/v5 |
|
| run | run_results.json | RunResultsArtifact | dbt/artifacts/schema/run/v5 |
|
||||||
| freshness | sources.json | FreshnessExecutionResultArtifact | dbt/artifacts/schema/freshness/v3 |
|
| freshness | sources.json | FreshnessExecutionResultArtifact | dbt/artifacts/schema/freshness/v3 |
|
||||||
@@ -30,29 +29,46 @@ All existing resources are defined under `dbt/artifacts/resources/v1`.
|
|||||||
|
|
||||||
## Making changes to dbt/artifacts
|
## Making changes to dbt/artifacts
|
||||||
|
|
||||||
|
### All changes
|
||||||
|
|
||||||
|
All changes to any fields will require a manual update to [dbt-jsonschema](https://github.com/dbt-labs/dbt-jsonschema) to ensure live checking continues to work.
|
||||||
|
|
||||||
### Non-breaking changes
|
### Non-breaking changes
|
||||||
|
|
||||||
Freely make incremental, non-breaking changes in-place to the latest major version of any artifact in mantle (via minor or patch bumps). The only changes that are fully forward and backward compatible are:
|
Freely make incremental, non-breaking changes in-place to the latest major version of any artifact (minor or patch bumps). The only changes that are fully forward and backward compatible are:
|
||||||
* Adding a new field with a default
|
* Adding a new field with a default
|
||||||
* Deleting an __optional__ field
|
* Deleting a field with a default. This is compatible in terms of serialization and deserialization, but still may be lead to suprising behaviour:
|
||||||
|
* For artifact consumers relying on the fields existence (e.g. `manifest["deleted_field"]` will stop working unless the access was implemented safely)
|
||||||
|
* Old code (e.g. in dbt-core) that relies on the value of the deleted field may have surprising behaviour given only the default value will be set when instantiated from the new schema
|
||||||
|
|
||||||
|
These types of minor, non-breaking changes are tested by [tests/unit/artifacts/test_base_resource.py::TestMinorSchemaChange](https://github.com/dbt-labs/dbt-core/blob/main/tests/unit/artifacts/test_base_resource.py).
|
||||||
|
|
||||||
|
|
||||||
|
#### Updating [schemas.getdbt.com](https://schemas.getdbt.com)
|
||||||
|
Non-breaking changes to artifact schemas require an update to the corresponding jsonschemas published to [schemas.getdbt.com](https://schemas.getdbt.com), which are defined in https://github.com/dbt-labs/schemas.getdbt.com. To do so:
|
||||||
|
Note this must be done AFTER the core pull request is merged, otherwise we may end up with unresolvable conflicts and schemas that are invalid prior to base pull request merge. You may create the schemas.getdbt.com pull request prior to merging the base pull request, but do not merge until afterward.
|
||||||
|
1. Create a PR in https://github.com/dbt-labs/schemas.getdbt.com which reflects the schema changes to the artifact. The schema can be updated in-place for non-breaking changes. Example PR: https://github.com/dbt-labs/schemas.getdbt.com/pull/39
|
||||||
|
2. Merge the https://github.com/dbt-labs/schemas.getdbt.com PR
|
||||||
|
|
||||||
|
Note: Although `jsonschema` validation using the schemas in [schemas.getdbt.com](https://schemas.getdbt.com) is not encouraged or formally supported, `jsonschema` validation should still continue to work once the schemas are updated because they are forward-compatible and can therefore be used to validate previous minor versions of the schema.
|
||||||
|
|
||||||
### Breaking changes
|
### Breaking changes
|
||||||
A breaking change is anything that:
|
A breaking change is anything that:
|
||||||
* Deletes a required field
|
* Deletes a required field
|
||||||
* Changes the name or type of an existing field
|
* Changes the name or type of an existing field
|
||||||
* Removes default from a field
|
* Removes the default value of an existing field
|
||||||
|
|
||||||
These should generally be avoided, and bundled together to aim for as minimal disruption across the integration ecosystem as possible.
|
These should be avoided however possible. When necessary, multiple breaking changes should be bundled together, to aim for minimal disruption across the ecosystem of tools that leverage dbt metadata.
|
||||||
|
|
||||||
However, when it comes time to make one (or more) of these, a new versioned artifact should be created as follows:
|
When it comes time to make breaking changes, a new versioned artifact should be created as follows:
|
||||||
1. Create a new version directory and file that defines the new artifact schema under `dbt/artifacts/schemas/<artifact>/v<next-artifact-version>/<artifact>.py`
|
1. Create a new version directory and file that defines the new artifact schema under `dbt/artifacts/schemas/<artifact>/v<next-artifact-version>/<artifact>.py`
|
||||||
2. If any resources are having breaking changes introduced, create a new resource class that defines the new resource schema under `dbt/artifacts/resources/v<next-resource-version>/<resource>.py`
|
2. If any resources are having breaking changes introduced, create a new resource class that defines the new resource schema under `dbt/artifacts/resources/v<next-resource-version>/<resource>.py`
|
||||||
3. Implement upgrade paths on the new versioned artifact class so it can be constructed given a dictionary representation of any previous version of the same artifact
|
3. Implement upgrade paths on the new versioned artifact class so it can be constructed given a dictionary representation of any previous version of the same artifact
|
||||||
* TODO: update once the design is finalized
|
* TODO: link example once available
|
||||||
4. Implement downgrade paths on all previous versions of the artifact class so they can still be constructed given a dictionary representation of the new artifact schema
|
4. Implement downgrade paths on all previous versions of the artifact class so they can still be constructed given a dictionary representation of the new artifact schema
|
||||||
* TODO: update once the design is finalized
|
* TODO: link example once available
|
||||||
5. Update the 'latest' aliases to point to the new version of the artifact and/or resource:
|
5. Update the 'latest' aliases to point to the new version of the artifact and/or resource:
|
||||||
* Artifact: `dbt/artifacts/schemas/<artifact>/__init__.py `
|
* Artifact: `dbt/artifacts/schemas/<artifact>/__init__.py `
|
||||||
* Resource: `dbt/artifacts/resources/__init__.py `
|
* Resource: `dbt/artifacts/resources/__init__.py `
|
||||||
|
|
||||||
Downstream consumers (e.g. dbt-core) importing from the latest alias are susceptible to breaking changes. Ideally, any incompatibilities should be caught my static type checking in those systems. However, it is always possible for consumers to pin imports to previous versions via `dbt.artifacts.schemas.<artifact>.v<prev-version>`
|
Downstream consumers (e.g. `dbt-core`) importing from the latest alias are susceptible to breaking changes. Ideally, any incompatibilities should be caught my static type checking in those systems. However, it is always possible for consumers to pin imports to previous versions via `dbt.artifacts.schemas.<artifact>.v<prev-version>`.
|
||||||
|
|||||||
@@ -1,32 +1,33 @@
|
|||||||
from dbt.artifacts.resources.base import BaseResource, GraphResource, FileHash, Docs
|
from dbt.artifacts.resources.base import BaseResource, Docs, FileHash, GraphResource
|
||||||
|
from dbt.artifacts.resources.v1.analysis import Analysis
|
||||||
|
from dbt.artifacts.resources.v1.catalog import Catalog, CatalogWriteIntegrationConfig
|
||||||
|
|
||||||
# alias to latest resource definitions
|
# alias to latest resource definitions
|
||||||
from dbt.artifacts.resources.v1.components import (
|
from dbt.artifacts.resources.v1.components import (
|
||||||
DependsOn,
|
ColumnConfig,
|
||||||
NodeVersion,
|
|
||||||
RefArgs,
|
|
||||||
HasRelationMetadata,
|
|
||||||
ParsedResourceMandatory,
|
|
||||||
ParsedResource,
|
|
||||||
ColumnInfo,
|
ColumnInfo,
|
||||||
CompiledResource,
|
CompiledResource,
|
||||||
InjectedCTE,
|
|
||||||
Contract,
|
Contract,
|
||||||
DeferRelation,
|
DeferRelation,
|
||||||
|
DependsOn,
|
||||||
FreshnessThreshold,
|
FreshnessThreshold,
|
||||||
|
HasRelationMetadata,
|
||||||
|
InjectedCTE,
|
||||||
|
NodeVersion,
|
||||||
|
ParsedResource,
|
||||||
|
ParsedResourceMandatory,
|
||||||
Quoting,
|
Quoting,
|
||||||
|
RefArgs,
|
||||||
Time,
|
Time,
|
||||||
)
|
)
|
||||||
from dbt.artifacts.resources.v1.analysis import Analysis
|
from dbt.artifacts.resources.v1.config import (
|
||||||
from dbt.artifacts.resources.v1.hook import HookNode
|
Hook,
|
||||||
from dbt.artifacts.resources.v1.model import Model, ModelConfig
|
NodeAndTestConfig,
|
||||||
from dbt.artifacts.resources.v1.sql_operation import SqlOperation
|
NodeConfig,
|
||||||
from dbt.artifacts.resources.v1.seed import Seed, SeedConfig
|
TestConfig,
|
||||||
from dbt.artifacts.resources.v1.singular_test import SingularTest
|
list_str,
|
||||||
from dbt.artifacts.resources.v1.generic_test import GenericTest, TestMetadata
|
metas,
|
||||||
from dbt.artifacts.resources.v1.snapshot import Snapshot, SnapshotConfig
|
)
|
||||||
|
|
||||||
|
|
||||||
from dbt.artifacts.resources.v1.documentation import Documentation
|
from dbt.artifacts.resources.v1.documentation import Documentation
|
||||||
from dbt.artifacts.resources.v1.exposure import (
|
from dbt.artifacts.resources.v1.exposure import (
|
||||||
Exposure,
|
Exposure,
|
||||||
@@ -34,18 +35,36 @@ from dbt.artifacts.resources.v1.exposure import (
|
|||||||
ExposureType,
|
ExposureType,
|
||||||
MaturityType,
|
MaturityType,
|
||||||
)
|
)
|
||||||
from dbt.artifacts.resources.v1.macro import Macro, MacroDependsOn, MacroArgument
|
from dbt.artifacts.resources.v1.function import (
|
||||||
from dbt.artifacts.resources.v1.group import Group
|
Function,
|
||||||
|
FunctionArgument,
|
||||||
|
FunctionConfig,
|
||||||
|
FunctionMandatory,
|
||||||
|
FunctionReturns,
|
||||||
|
)
|
||||||
|
from dbt.artifacts.resources.v1.generic_test import GenericTest, TestMetadata
|
||||||
|
from dbt.artifacts.resources.v1.group import Group, GroupConfig
|
||||||
|
from dbt.artifacts.resources.v1.hook import HookNode
|
||||||
|
from dbt.artifacts.resources.v1.macro import Macro, MacroArgument, MacroDependsOn
|
||||||
from dbt.artifacts.resources.v1.metric import (
|
from dbt.artifacts.resources.v1.metric import (
|
||||||
ConstantPropertyInput,
|
ConstantPropertyInput,
|
||||||
ConversionTypeParams,
|
ConversionTypeParams,
|
||||||
|
CumulativeTypeParams,
|
||||||
Metric,
|
Metric,
|
||||||
|
MetricAggregationParams,
|
||||||
MetricConfig,
|
MetricConfig,
|
||||||
MetricInput,
|
MetricInput,
|
||||||
MetricInputMeasure,
|
MetricInputMeasure,
|
||||||
MetricTimeWindow,
|
MetricTimeWindow,
|
||||||
MetricTypeParams,
|
MetricTypeParams,
|
||||||
)
|
)
|
||||||
|
from dbt.artifacts.resources.v1.model import (
|
||||||
|
CustomGranularity,
|
||||||
|
Model,
|
||||||
|
ModelConfig,
|
||||||
|
ModelFreshness,
|
||||||
|
TimeSpine,
|
||||||
|
)
|
||||||
from dbt.artifacts.resources.v1.owner import Owner
|
from dbt.artifacts.resources.v1.owner import Owner
|
||||||
from dbt.artifacts.resources.v1.saved_query import (
|
from dbt.artifacts.resources.v1.saved_query import (
|
||||||
Export,
|
Export,
|
||||||
@@ -55,8 +74,11 @@ from dbt.artifacts.resources.v1.saved_query import (
|
|||||||
SavedQueryConfig,
|
SavedQueryConfig,
|
||||||
SavedQueryMandatory,
|
SavedQueryMandatory,
|
||||||
)
|
)
|
||||||
|
from dbt.artifacts.resources.v1.seed import Seed, SeedConfig
|
||||||
from dbt.artifacts.resources.v1.semantic_layer_components import (
|
from dbt.artifacts.resources.v1.semantic_layer_components import (
|
||||||
FileSlice,
|
FileSlice,
|
||||||
|
MeasureAggregationParameters,
|
||||||
|
NonAdditiveDimension,
|
||||||
SourceFileMetadata,
|
SourceFileMetadata,
|
||||||
WhereFilter,
|
WhereFilter,
|
||||||
WhereFilterIntersection,
|
WhereFilterIntersection,
|
||||||
@@ -68,34 +90,27 @@ from dbt.artifacts.resources.v1.semantic_model import (
|
|||||||
DimensionValidityParams,
|
DimensionValidityParams,
|
||||||
Entity,
|
Entity,
|
||||||
Measure,
|
Measure,
|
||||||
MeasureAggregationParameters,
|
|
||||||
NodeRelation,
|
NodeRelation,
|
||||||
NonAdditiveDimension,
|
SemanticLayerElementConfig,
|
||||||
SemanticModel,
|
SemanticModel,
|
||||||
SemanticModelConfig,
|
SemanticModelConfig,
|
||||||
)
|
)
|
||||||
|
from dbt.artifacts.resources.v1.singular_test import SingularTest
|
||||||
from dbt.artifacts.resources.v1.config import (
|
from dbt.artifacts.resources.v1.snapshot import Snapshot, SnapshotConfig
|
||||||
NodeAndTestConfig,
|
|
||||||
NodeConfig,
|
|
||||||
TestConfig,
|
|
||||||
Hook,
|
|
||||||
)
|
|
||||||
|
|
||||||
from dbt.artifacts.resources.v1.source_definition import (
|
from dbt.artifacts.resources.v1.source_definition import (
|
||||||
SourceConfig,
|
|
||||||
ExternalPartition,
|
ExternalPartition,
|
||||||
ExternalTable,
|
ExternalTable,
|
||||||
SourceDefinition,
|
|
||||||
ParsedSourceMandatory,
|
ParsedSourceMandatory,
|
||||||
|
SourceConfig,
|
||||||
|
SourceDefinition,
|
||||||
)
|
)
|
||||||
|
from dbt.artifacts.resources.v1.sql_operation import SqlOperation
|
||||||
from dbt.artifacts.resources.v1.unit_test_definition import (
|
from dbt.artifacts.resources.v1.unit_test_definition import (
|
||||||
UnitTestConfig,
|
UnitTestConfig,
|
||||||
UnitTestDefinition,
|
UnitTestDefinition,
|
||||||
|
UnitTestFormat,
|
||||||
UnitTestInputFixture,
|
UnitTestInputFixture,
|
||||||
|
UnitTestNodeVersions,
|
||||||
UnitTestOutputFixture,
|
UnitTestOutputFixture,
|
||||||
UnitTestOverrides,
|
UnitTestOverrides,
|
||||||
UnitTestNodeVersions,
|
|
||||||
UnitTestFormat,
|
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -1,9 +1,9 @@
|
|||||||
from dataclasses import dataclass
|
|
||||||
from dbt_common.dataclass_schema import dbtClassMixin
|
|
||||||
from typing import List, Optional
|
|
||||||
import hashlib
|
import hashlib
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
from dbt.artifacts.resources.types import NodeType
|
from dbt.artifacts.resources.types import NodeType
|
||||||
|
from dbt_common.dataclass_schema import dbtClassMixin
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
|
|||||||
@@ -35,6 +35,7 @@ class NodeType(StrEnum):
|
|||||||
SemanticModel = "semantic_model"
|
SemanticModel = "semantic_model"
|
||||||
Unit = "unit_test"
|
Unit = "unit_test"
|
||||||
Fixture = "fixture"
|
Fixture = "fixture"
|
||||||
|
Function = "function"
|
||||||
|
|
||||||
def pluralize(self) -> str:
|
def pluralize(self) -> str:
|
||||||
if self is self.Analysis:
|
if self is self.Analysis:
|
||||||
@@ -68,3 +69,25 @@ class TimePeriod(StrEnum):
|
|||||||
|
|
||||||
def plural(self) -> str:
|
def plural(self) -> str:
|
||||||
return str(self) + "s"
|
return str(self) + "s"
|
||||||
|
|
||||||
|
|
||||||
|
class BatchSize(StrEnum):
|
||||||
|
hour = "hour"
|
||||||
|
day = "day"
|
||||||
|
month = "month"
|
||||||
|
year = "year"
|
||||||
|
|
||||||
|
def plural(self) -> str:
|
||||||
|
return str(self) + "s"
|
||||||
|
|
||||||
|
|
||||||
|
class FunctionType(StrEnum):
|
||||||
|
Scalar = "scalar"
|
||||||
|
Aggregate = "aggregate"
|
||||||
|
Table = "table"
|
||||||
|
|
||||||
|
|
||||||
|
class FunctionVolatility(StrEnum):
|
||||||
|
Deterministic = "deterministic"
|
||||||
|
Stable = "stable"
|
||||||
|
NonDeterministic = "non-deterministic"
|
||||||
|
|||||||
@@ -1,7 +1,8 @@
|
|||||||
from dbt.artifacts.resources.v1.components import CompiledResource
|
|
||||||
from typing import Literal
|
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
|
from typing import Literal
|
||||||
|
|
||||||
from dbt.artifacts.resources.types import NodeType
|
from dbt.artifacts.resources.types import NodeType
|
||||||
|
from dbt.artifacts.resources.v1.components import CompiledResource
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
|
|||||||
23
core/dbt/artifacts/resources/v1/catalog.py
Normal file
23
core/dbt/artifacts/resources/v1/catalog.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
from dataclasses import dataclass, field
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
from dbt.adapters.catalogs import CatalogIntegrationConfig
|
||||||
|
from dbt_common.dataclass_schema import dbtClassMixin
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class CatalogWriteIntegrationConfig(CatalogIntegrationConfig):
|
||||||
|
name: str
|
||||||
|
catalog_type: str
|
||||||
|
external_volume: Optional[str] = None
|
||||||
|
table_format: Optional[str] = None
|
||||||
|
catalog_name: Optional[str] = None
|
||||||
|
file_format: Optional[str] = None
|
||||||
|
adapter_properties: Dict[str, Any] = field(default_factory=dict)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Catalog(dbtClassMixin):
|
||||||
|
name: str
|
||||||
|
active_write_integration: Optional[str] = None
|
||||||
|
write_integrations: List[CatalogWriteIntegrationConfig] = field(default_factory=list)
|
||||||
@@ -1,20 +1,35 @@
|
|||||||
import time
|
import time
|
||||||
from dataclasses import dataclass, field
|
from dataclasses import dataclass, field
|
||||||
from dbt.artifacts.resources.base import GraphResource, FileHash, Docs
|
from datetime import timedelta
|
||||||
from dbt.artifacts.resources.types import NodeType
|
from typing import Any, Dict, List, Optional, Union
|
||||||
|
|
||||||
|
from dbt.artifacts.resources.base import Docs, FileHash, GraphResource
|
||||||
|
from dbt.artifacts.resources.types import NodeType, TimePeriod
|
||||||
from dbt.artifacts.resources.v1.config import NodeConfig
|
from dbt.artifacts.resources.v1.config import NodeConfig
|
||||||
from dbt_common.dataclass_schema import dbtClassMixin, ExtensibleDbtClassMixin
|
from dbt_common.contracts.config.base import BaseConfig, MergeBehavior
|
||||||
from dbt_common.contracts.config.properties import AdditionalPropertiesMixin
|
from dbt_common.contracts.config.properties import AdditionalPropertiesMixin
|
||||||
from dbt_common.contracts.constraints import ColumnLevelConstraint
|
from dbt_common.contracts.constraints import ColumnLevelConstraint
|
||||||
from typing import Dict, List, Optional, Union, Any
|
|
||||||
from datetime import timedelta
|
|
||||||
from dbt.artifacts.resources.types import TimePeriod
|
|
||||||
from dbt_common.contracts.util import Mergeable
|
from dbt_common.contracts.util import Mergeable
|
||||||
|
from dbt_common.dataclass_schema import ExtensibleDbtClassMixin, dbtClassMixin
|
||||||
|
from dbt_semantic_interfaces.type_enums import TimeGranularity
|
||||||
|
|
||||||
NodeVersion = Union[str, float]
|
NodeVersion = Union[str, float]
|
||||||
|
|
||||||
|
|
||||||
|
def _backcompat_doc_blocks(doc_blocks: Any) -> List[str]:
|
||||||
|
"""
|
||||||
|
Make doc_blocks backwards-compatible for scenarios where a user specifies `doc_blocks` on a model or column.
|
||||||
|
Mashumaro will raise a serialization error if the specified `doc_blocks` isn't a list of strings.
|
||||||
|
In such a scenario, this method returns an empty list to avoid a serialization error.
|
||||||
|
Further along, `_get_doc_blocks` in `manifest.py` populates the correct `doc_blocks` for the happy path.
|
||||||
|
"""
|
||||||
|
|
||||||
|
if isinstance(doc_blocks, list) and all(isinstance(x, str) for x in doc_blocks):
|
||||||
|
return doc_blocks
|
||||||
|
|
||||||
|
return []
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class MacroDependsOn(dbtClassMixin):
|
class MacroDependsOn(dbtClassMixin):
|
||||||
macros: List[str] = field(default_factory=list)
|
macros: List[str] = field(default_factory=list)
|
||||||
@@ -55,6 +70,12 @@ class RefArgs(dbtClassMixin):
|
|||||||
return {}
|
return {}
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ColumnConfig(BaseConfig):
|
||||||
|
meta: Dict[str, Any] = field(default_factory=dict, metadata=MergeBehavior.Update.meta())
|
||||||
|
tags: List[str] = field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class ColumnInfo(AdditionalPropertiesMixin, ExtensibleDbtClassMixin):
|
class ColumnInfo(AdditionalPropertiesMixin, ExtensibleDbtClassMixin):
|
||||||
"""Used in all ManifestNodes and SourceDefinition"""
|
"""Used in all ManifestNodes and SourceDefinition"""
|
||||||
@@ -65,8 +86,16 @@ class ColumnInfo(AdditionalPropertiesMixin, ExtensibleDbtClassMixin):
|
|||||||
data_type: Optional[str] = None
|
data_type: Optional[str] = None
|
||||||
constraints: List[ColumnLevelConstraint] = field(default_factory=list)
|
constraints: List[ColumnLevelConstraint] = field(default_factory=list)
|
||||||
quote: Optional[bool] = None
|
quote: Optional[bool] = None
|
||||||
|
config: ColumnConfig = field(default_factory=ColumnConfig)
|
||||||
tags: List[str] = field(default_factory=list)
|
tags: List[str] = field(default_factory=list)
|
||||||
_extra: Dict[str, Any] = field(default_factory=dict)
|
_extra: Dict[str, Any] = field(default_factory=dict)
|
||||||
|
granularity: Optional[TimeGranularity] = None
|
||||||
|
doc_blocks: List[str] = field(default_factory=list)
|
||||||
|
|
||||||
|
def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None) -> dict:
|
||||||
|
dct = super().__post_serialize__(dct, context)
|
||||||
|
dct["doc_blocks"] = _backcompat_doc_blocks(dct["doc_blocks"])
|
||||||
|
return dct
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
@@ -193,13 +222,21 @@ class ParsedResource(ParsedResourceMandatory):
|
|||||||
unrendered_config: Dict[str, Any] = field(default_factory=dict)
|
unrendered_config: Dict[str, Any] = field(default_factory=dict)
|
||||||
created_at: float = field(default_factory=lambda: time.time())
|
created_at: float = field(default_factory=lambda: time.time())
|
||||||
config_call_dict: Dict[str, Any] = field(default_factory=dict)
|
config_call_dict: Dict[str, Any] = field(default_factory=dict)
|
||||||
|
unrendered_config_call_dict: Dict[str, Any] = field(default_factory=dict)
|
||||||
relation_name: Optional[str] = None
|
relation_name: Optional[str] = None
|
||||||
raw_code: str = ""
|
raw_code: str = ""
|
||||||
|
doc_blocks: List[str] = field(default_factory=list)
|
||||||
|
|
||||||
def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None):
|
def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None):
|
||||||
dct = super().__post_serialize__(dct, context)
|
dct = super().__post_serialize__(dct, context)
|
||||||
|
|
||||||
if context and context.get("artifact") and "config_call_dict" in dct:
|
if context and context.get("artifact") and "config_call_dict" in dct:
|
||||||
del dct["config_call_dict"]
|
del dct["config_call_dict"]
|
||||||
|
if context and context.get("artifact") and "unrendered_config_call_dict" in dct:
|
||||||
|
del dct["unrendered_config_call_dict"]
|
||||||
|
|
||||||
|
dct["doc_blocks"] = _backcompat_doc_blocks(dct["doc_blocks"])
|
||||||
|
|
||||||
return dct
|
return dct
|
||||||
|
|
||||||
|
|
||||||
@@ -212,6 +249,7 @@ class CompiledResource(ParsedResource):
|
|||||||
refs: List[RefArgs] = field(default_factory=list)
|
refs: List[RefArgs] = field(default_factory=list)
|
||||||
sources: List[List[str]] = field(default_factory=list)
|
sources: List[List[str]] = field(default_factory=list)
|
||||||
metrics: List[List[str]] = field(default_factory=list)
|
metrics: List[List[str]] = field(default_factory=list)
|
||||||
|
functions: List[List[str]] = field(default_factory=list)
|
||||||
depends_on: DependsOn = field(default_factory=DependsOn)
|
depends_on: DependsOn = field(default_factory=DependsOn)
|
||||||
compiled_path: Optional[str] = None
|
compiled_path: Optional[str] = None
|
||||||
compiled: bool = False
|
compiled: bool = False
|
||||||
|
|||||||
@@ -1,21 +1,18 @@
|
|||||||
import re
|
import re
|
||||||
|
|
||||||
from dbt_common.dataclass_schema import dbtClassMixin, ValidationError
|
|
||||||
from typing import Optional, List, Any, Dict, Union
|
|
||||||
from typing_extensions import Annotated
|
|
||||||
from dataclasses import dataclass, field
|
from dataclasses import dataclass, field
|
||||||
from dbt_common.contracts.config.base import (
|
from typing import Any, Dict, List, Optional, Union
|
||||||
BaseConfig,
|
|
||||||
CompareBehavior,
|
from mashumaro.jsonschema.annotations import Pattern
|
||||||
MergeBehavior,
|
from typing_extensions import Annotated
|
||||||
)
|
|
||||||
from dbt_common.contracts.config.metadata import Metadata, ShowBehavior
|
from dbt import hooks
|
||||||
from dbt_common.contracts.config.materialization import OnConfigurationChangeOption
|
|
||||||
from dbt.artifacts.resources.base import Docs
|
from dbt.artifacts.resources.base import Docs
|
||||||
from dbt.artifacts.resources.types import ModelHookType
|
from dbt.artifacts.resources.types import ModelHookType
|
||||||
from dbt.artifacts.utils.validation import validate_color
|
from dbt.artifacts.utils.validation import validate_color
|
||||||
from dbt import hooks
|
from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior
|
||||||
from mashumaro.jsonschema.annotations import Pattern
|
from dbt_common.contracts.config.materialization import OnConfigurationChangeOption
|
||||||
|
from dbt_common.contracts.config.metadata import Metadata, ShowBehavior
|
||||||
|
from dbt_common.dataclass_schema import ValidationError, dbtClassMixin
|
||||||
|
|
||||||
|
|
||||||
def list_str() -> List[str]:
|
def list_str() -> List[str]:
|
||||||
@@ -83,6 +80,9 @@ class NodeConfig(NodeAndTestConfig):
|
|||||||
# 'mergebehavior' dictionary
|
# 'mergebehavior' dictionary
|
||||||
materialized: str = "view"
|
materialized: str = "view"
|
||||||
incremental_strategy: Optional[str] = None
|
incremental_strategy: Optional[str] = None
|
||||||
|
batch_size: Any = None
|
||||||
|
lookback: Any = 1
|
||||||
|
begin: Any = None
|
||||||
persist_docs: Dict[str, Any] = field(default_factory=dict)
|
persist_docs: Dict[str, Any] = field(default_factory=dict)
|
||||||
post_hook: List[Hook] = field(
|
post_hook: List[Hook] = field(
|
||||||
default_factory=list,
|
default_factory=list,
|
||||||
@@ -125,6 +125,8 @@ class NodeConfig(NodeAndTestConfig):
|
|||||||
default_factory=ContractConfig,
|
default_factory=ContractConfig,
|
||||||
metadata=MergeBehavior.Update.meta(),
|
metadata=MergeBehavior.Update.meta(),
|
||||||
)
|
)
|
||||||
|
event_time: Any = None
|
||||||
|
concurrent_batches: Any = None
|
||||||
|
|
||||||
def __post_init__(self):
|
def __post_init__(self):
|
||||||
# we validate that node_color has a suitable value to prevent dbt-docs from crashing
|
# we validate that node_color has a suitable value to prevent dbt-docs from crashing
|
||||||
@@ -179,7 +181,7 @@ class TestConfig(NodeAndTestConfig):
|
|||||||
warn_if: str = "!= 0"
|
warn_if: str = "!= 0"
|
||||||
error_if: str = "!= 0"
|
error_if: str = "!= 0"
|
||||||
|
|
||||||
def __post_init__(self):
|
def finalize_and_validate(self):
|
||||||
"""
|
"""
|
||||||
The presence of a setting for `store_failures_as` overrides any existing setting for `store_failures`,
|
The presence of a setting for `store_failures_as` overrides any existing setting for `store_failures`,
|
||||||
regardless of level of granularity. If `store_failures_as` is not set, then `store_failures` takes effect.
|
regardless of level of granularity. If `store_failures_as` is not set, then `store_failures` takes effect.
|
||||||
@@ -205,6 +207,7 @@ class TestConfig(NodeAndTestConfig):
|
|||||||
but still allow for backwards compatibility for `store_failures`.
|
but still allow for backwards compatibility for `store_failures`.
|
||||||
See https://github.com/dbt-labs/dbt-core/issues/6914 for more information.
|
See https://github.com/dbt-labs/dbt-core/issues/6914 for more information.
|
||||||
"""
|
"""
|
||||||
|
super().finalize_and_validate()
|
||||||
|
|
||||||
# if `store_failures_as` is not set, it gets set by `store_failures`
|
# if `store_failures_as` is not set, it gets set by `store_failures`
|
||||||
# the settings below mimic existing behavior prior to `store_failures_as`
|
# the settings below mimic existing behavior prior to `store_failures_as`
|
||||||
@@ -227,6 +230,8 @@ class TestConfig(NodeAndTestConfig):
|
|||||||
else:
|
else:
|
||||||
self.store_failures = get_store_failures_map.get(self.store_failures_as, True)
|
self.store_failures = get_store_failures_map.get(self.store_failures_as, True)
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def same_contents(cls, unrendered: Dict[str, Any], other: Dict[str, Any]) -> bool:
|
def same_contents(cls, unrendered: Dict[str, Any], other: Dict[str, Any]) -> bool:
|
||||||
"""This is like __eq__, except it explicitly checks certain fields."""
|
"""This is like __eq__, except it explicitly checks certain fields."""
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user